Dataset columns (each row below gives these fields in order, separated by "|"):

- file_name: string (length 5 to 52)
- name: string (length 4 to 95)
- original_source_type: string (length 0 to 23k)
- source_type: string (length 9 to 23k)
- source_definition: string (length 9 to 57.9k)
- source: dict
- source_range: dict
- file_context: string (length 0 to 721k)
- dependencies: dict
- opens_and_abbrevs: list (length 2 to 94)
- vconfig: dict
- interleaved: bool (1 distinct value)
- verbose_type: string (length 1 to 7.42k)
- effect: string (118 distinct values)
- effect_flags: sequence (length 0 to 2)
- mutual_with: sequence (length 0 to 11)
- ideal_premises: sequence (length 0 to 236)
- proof_features: sequence (length 0 to 1)
- is_simple_lemma: bool (2 distinct values)
- is_div: bool (2 distinct values)
- is_proof: bool (2 distinct values)
- is_simply_typed: bool (2 distinct values)
- is_type: bool (2 distinct values)
- partial_definition: string (length 5 to 3.99k)
- completed_definiton: string (length 1 to 1.63M)
- isa_cross_project_example: bool (1 distinct value)
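The raw rows below are easier to work with programmatically than to read inline. As a minimal sketch (assuming the data is published on the Hugging Face Hub; the repository id used here is a placeholder, not the real dataset name), rows with this schema could be loaded and filtered like this:

```python
# Minimal sketch, not part of the dataset itself.
# "org-name/fstar-definitions" is a placeholder repository id.
from datasets import load_dataset

ds = load_dataset("org-name/fstar-definitions", split="train")

# Each example is a dict keyed by the column names listed above.
example = ds[0]
print(example["file_name"])                # e.g. "code/drbg/Hacl.HMAC_DRBG.fst"
print(example["name"])                     # fully qualified F* name of the definition
print(example["source_type"])              # the declared (val) type of the definition
print(example["source_definition"][:200])  # start of the let-definition body

# Keep only rows flagged as proofs and report how many remain.
proofs = ds.filter(lambda ex: ex["is_proof"])
print(len(proofs))
```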
Hacl.HMAC_DRBG.fst | Hacl.HMAC_DRBG.mk_instantiate | val mk_instantiate: #a:supported_alg -> hmac:HMAC.compute_st a -> instantiate_st a | val mk_instantiate: #a:supported_alg -> hmac:HMAC.compute_st a -> instantiate_st a | let mk_instantiate #a hmac st
entropy_input_len entropy_input
nonce_len nonce
personalization_string_len personalization_string
=
let h0 = ST.get () in
push_frame();
let seed_material = create (entropy_input_len +! nonce_len +! personalization_string_len) (u8 0) in
copy (sub seed_material 0ul entropy_input_len) entropy_input;
copy (sub seed_material entropy_input_len nonce_len) nonce;
copy (sub seed_material (entropy_input_len +! nonce_len) personalization_string_len) personalization_string;
let State k v ctr = st in
memset k (u8 0) (hash_len a);
memset v (u8 1) (hash_len a);
let h1 = ST.get () in
assert (Seq.equal (as_seq h1 seed_material)
(Seq.append (as_seq h0 entropy_input) (Seq.append (as_seq h0 nonce)
(as_seq h0 personalization_string))));
assert (LSeq.equal (as_seq h1 k) (LSeq.create (hash_length a) (u8 0)));
assert (LSeq.equal (as_seq h1 v) (LSeq.create (hash_length a) (u8 1)));
ctr.(0ul) <- 1ul;
update hmac (entropy_input_len +! nonce_len +! personalization_string_len)
seed_material k v;
pop_frame() | {
"file_name": "code/drbg/Hacl.HMAC_DRBG.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 13,
"end_line": 188,
"start_col": 0,
"start_line": 165
} | module Hacl.HMAC_DRBG
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
open Spec.Hash.Definitions
open Lib.IntTypes
open Lib.Buffer
module HS = FStar.HyperStack
module B = LowStar.Buffer
module LSeq = Lib.Sequence
module HMAC = Hacl.HMAC
module S = Spec.HMAC_DRBG
friend Spec.HMAC_DRBG
unfold
let hash_len (a:supported_alg) = Hacl.Hash.Definitions.hash_len a
#set-options "--fuel 0 --ifuel 0 --z3rlimit 50"
inline_for_extraction noextract
val update_round: #a:supported_alg
-> hmac:HMAC.compute_st a
-> len:size_t
-> data:lbuffer uint8 len
-> n:uint8
-> k:lbuffer uint8 (hash_len a)
-> v:lbuffer uint8 (hash_len a)
-> Stack unit
(requires fun h0 ->
live h0 k /\ live h0 v /\ live h0 data /\
disjoint k v /\
// HMAC input length must fit in size_t
hash_length a + 1 + uint_v len + block_length a < pow2 32)
(ensures fun h0 _ h1 ->
S.hmac_input_bound a;
as_seq h1 k == Spec.Agile.HMAC.hmac a
(as_seq h0 k)
(Seq.append (as_seq h0 v) (Seq.cons n (as_seq h0 data))) /\
as_seq h1 v == Spec.Agile.HMAC.hmac a (as_seq h1 k) (as_seq h0 v) /\
modifies2 k v h0 h1)
let update_round #a hmac len data n k v =
let h0 = ST.get() in
push_frame();
let input_len = hash_len a +! 1ul +! len in
let input = create input_len (u8 0) in
let k' = sub input 0ul (hash_len a) in
copy k' v;
if len <> 0ul then copy (sub input (hash_len a +! 1ul) len) data;
input.(hash_len a) <- n;
let h1 = ST.get() in
assert (Seq.equal (as_seq h1 input)
(Seq.append (as_seq h0 v) (Seq.cons n (as_seq h0 data))));
S.hmac_input_bound a;
hmac k' k (hash_len a) input input_len;
hmac v k' (hash_len a) v (hash_len a);
copy k k';
pop_frame()
inline_for_extraction noextract
val update: #a:supported_alg
-> hmac:HMAC.compute_st a
-> len:size_t
-> data:lbuffer uint8 len
-> k:lbuffer uint8 (hash_len a)
-> v:lbuffer uint8 (hash_len a)
-> Stack unit
(requires fun h0 ->
live h0 data /\ live h0 k /\ live h0 v /\
disjoint k v /\ disjoint k data /\ disjoint v data /\
hash_length a + 1 + uint_v len + block_length a < pow2 32)
(ensures fun h0 _ h1 ->
S.hmac_input_bound a;
let k', v' = S.update #a (as_seq h0 data) (as_seq h0 k) (as_seq h0 v) in
modifies2 k v h0 h1 /\
as_seq h1 k == k' /\
as_seq h1 v == v')
let update #a hmac len data k v =
update_round hmac len data (u8 0) k v;
if len <> 0ul then
update_round hmac len data (u8 1) k v
noeq
type state (a:supported_alg) =
| State:
k:lbuffer uint8 (hash_len a)
-> v:lbuffer uint8 (hash_len a)
-> reseed_counter:lbuffer size_t 1ul
{disjoint k v /\ disjoint k reseed_counter /\ disjoint v reseed_counter}
-> state a
let freeable #a st =
let k:B.buffer uint8 = st.k in
let v:B.buffer uint8 = st.v in
let ctr:B.buffer size_t = st.reseed_counter in
B.freeable k /\ B.freeable v /\ B.freeable ctr
let footprint #a st =
let k:B.buffer uint8 = st.k in
let v:B.buffer uint8 = st.v in
let ctr:B.buffer size_t = st.reseed_counter in
B.loc_addr_of_buffer k |+| B.loc_addr_of_buffer v |+| B.loc_addr_of_buffer ctr
let invariant #a st h =
live h st.k /\ live h st.v /\ live h st.reseed_counter /\ (
// JP: the disjoint predicate from lib hardcodes loc_buffer instead of
// loc_addr_of_buffer, which prevents us from writing a proper free function
// (probably why it wasn't written here in the first place)... we add on top
// of the lib-style predicate a non-lib-style predicate which allows writing
// an actual free function
let k = st.k <: B.buffer uint8 in
let v = st.v <: B.buffer uint8 in
let ctr = st.reseed_counter <: B.buffer size_t in
B.(all_disjoint [ loc_addr_of_buffer k; loc_addr_of_buffer v; loc_addr_of_buffer ctr ]))
let repr #a st h =
S.State (as_seq h st.k) (as_seq h st.v) (v (bget h st.reseed_counter 0))
let alloca a =
let k =
match a with
| SHA1 -> create (hash_len SHA1) (u8 0)
| SHA2_256 -> create (hash_len SHA2_256) (u8 0)
| SHA2_384 -> create (hash_len SHA2_384) (u8 0)
| SHA2_512 -> create (hash_len SHA2_512) (u8 0)
in
let v =
match a with
| SHA1 -> create (hash_len SHA1) (u8 0)
| SHA2_256 -> create (hash_len SHA2_256) (u8 0)
| SHA2_384 -> create (hash_len SHA2_384) (u8 0)
| SHA2_512 -> create (hash_len SHA2_512) (u8 0)
in
let ctr = create 1ul 1ul in
State k v ctr
let create_in a r =
let k:B.buffer uint8 =
match a with
| SHA1 -> B.malloc r (u8 0) (hash_len SHA1)
| SHA2_256 -> B.malloc r (u8 0) (hash_len SHA2_256)
| SHA2_384 -> B.malloc r (u8 0) (hash_len SHA2_384)
| SHA2_512 -> B.malloc r (u8 0) (hash_len SHA2_512)
in
let v:B.buffer uint8 =
match a with
| SHA1 -> B.malloc r (u8 0) (hash_len SHA1)
| SHA2_256 -> B.malloc r (u8 0) (hash_len SHA2_256)
| SHA2_384 -> B.malloc r (u8 0) (hash_len SHA2_384)
| SHA2_512 -> B.malloc r (u8 0) (hash_len SHA2_512)
in
let ctr:B.buffer size_t = B.malloc r 1ul 1ul in
State k v ctr
#push-options "--z3rlimit 200" | {
"checked_file": "/",
"dependencies": [
"Spec.HMAC_DRBG.fst.checked",
"Spec.HMAC_DRBG.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.HMAC.fsti.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.HMAC_DRBG.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.HMAC_DRBG",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.HMAC",
"short_module": "HMAC"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Spec.HMAC_DRBG",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.HMAC",
"short_module": "HMAC"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Hash.Definitions",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | hmac: Hacl.HMAC.compute_st a -> Hacl.HMAC_DRBG.instantiate_st a | Prims.Tot | [
"total"
] | [] | [
"Hacl.HMAC_DRBG.supported_alg",
"Hacl.HMAC.compute_st",
"Hacl.HMAC_DRBG.state",
"Lib.IntTypes.size_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"Hacl.HMAC_DRBG.hash_len",
"FStar.UInt32.__uint_to_t",
"Prims.l_and",
"Lib.Buffer.disjoint",
"Lib.Buffer.MUT",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.HMAC_DRBG.update",
"Lib.IntTypes.op_Plus_Bang",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.Buffer.op_Array_Assignment",
"Prims._assert",
"Lib.Sequence.equal",
"Lib.IntTypes.v",
"Lib.Buffer.as_seq",
"Lib.Sequence.create",
"Spec.Hash.Definitions.hash_length",
"Lib.IntTypes.u8",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.append",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Lib.Buffer.memset",
"Lib.Buffer.copy",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.Buffer.sub",
"Lib.IntTypes.add",
"Lib.Buffer.create",
"FStar.HyperStack.ST.push_frame"
] | [] | false | false | false | false | false | let mk_instantiate
#a
hmac
st
entropy_input_len
entropy_input
nonce_len
nonce
personalization_string_len
personalization_string
=
| let h0 = ST.get () in
push_frame ();
let seed_material = create (entropy_input_len +! nonce_len +! personalization_string_len) (u8 0) in
copy (sub seed_material 0ul entropy_input_len) entropy_input;
copy (sub seed_material entropy_input_len nonce_len) nonce;
copy (sub seed_material (entropy_input_len +! nonce_len) personalization_string_len)
personalization_string;
let State k v ctr = st in
memset k (u8 0) (hash_len a);
memset v (u8 1) (hash_len a);
let h1 = ST.get () in
assert (Seq.equal (as_seq h1 seed_material)
(Seq.append (as_seq h0 entropy_input)
(Seq.append (as_seq h0 nonce) (as_seq h0 personalization_string))));
assert (LSeq.equal (as_seq h1 k) (LSeq.create (hash_length a) (u8 0)));
assert (LSeq.equal (as_seq h1 v) (LSeq.create (hash_length a) (u8 1)));
ctr.(0ul) <- 1ul;
update hmac (entropy_input_len +! nonce_len +! personalization_string_len) seed_material k v;
pop_frame () | false |
Pulse.Checker.Prover.Match.fst | Pulse.Checker.Prover.Match.unify | val unify (g: env) (uvs: env{disjoint uvs g}) (p q: term)
: T.Tac
(ss: PS.ss_t{(PS.dom ss) `Set.subset` (freevars q)} &
option (RT.equiv (elab_env g) (elab_term p) (elab_term ss.(q)))) | val unify (g: env) (uvs: env{disjoint uvs g}) (p q: term)
: T.Tac
(ss: PS.ss_t{(PS.dom ss) `Set.subset` (freevars q)} &
option (RT.equiv (elab_env g) (elab_term p) (elab_term ss.(q)))) | let unify (g:env) (uvs:env { disjoint uvs g})
(p q:term)
: T.Tac (ss:PS.ss_t { PS.dom ss `Set.subset` freevars q } &
option (RT.equiv (elab_env g) (elab_term p) (elab_term ss.(q)))) =
let ss = try_solve_uvars g uvs p q in
let q_ss = readback_ty (elab_term ss.(q)) in
match q_ss with
| None -> (| ss, None |)
| Some q ->
if eq_tm p q
then (| ss, Some (RT.Rel_refl _ _ _) |)
else if contains_uvar q uvs g
then (| ss, None |)
else if eligible_for_smt_equality g p q
then let v0 = elab_term p in
let v1 = elab_term q in
match check_equiv_now (elab_env g) v0 v1 with
| Some token, _ -> (| ss, Some (RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash token)) |)
| None, _ -> (| ss, None |)
else (| ss, None |) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Match.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 23,
"end_line": 286,
"start_col": 0,
"start_line": 266
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Match
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Typing.Metatheory
open Pulse.Typing.Util
open Pulse.Checker.VPropEquiv
open Pulse.Checker.Prover.Base
open Pulse.Checker.Prover.Util
module RU = Pulse.RuntimeUtils
module L = FStar.List.Tot
module R = FStar.Reflection.V2
module TermEq = FStar.Reflection.V2.TermEq
module T = FStar.Tactics.V2
module RUtil = Pulse.Reflection.Util
module P = Pulse.Syntax.Printer
module PS = Pulse.Checker.Prover.Substs
module Metatheory = Pulse.Typing.Metatheory
let equational (t:term) : bool =
match t.t with
| Tm_FStar host_term ->
(match R.inspect_ln host_term with
| R.Tv_Match _ _ _ -> true
| _ -> false)
| _ -> false
let type_of_fv (g:env) (fv:R.fv)
: T.Tac (option R.term)
= let n = R.inspect_fv fv in
match R.lookup_typ (fstar_env g) n with
| None -> None
| Some se ->
match R.inspect_sigelt se with
| R.Unk -> None
| R.Sg_Let _ lbs -> (
L.tryPick
(fun lb ->
let lbv = R.inspect_lb lb in
if R.inspect_fv lbv.lb_fv = n
then Some lbv.lb_typ
else None)
lbs
)
| R.Sg_Val _ _ t -> Some t
| R.Sg_Inductive _nm _univs params typ _ -> None
let is_smt_fallback (t:R.term) : bool =
match R.inspect_ln t with
| R.Tv_FVar fv ->
let name = R.inspect_fv fv in
name = ["Steel";"Effect";"Common";"smt_fallback"] ||
name = ["Pulse"; "Lib"; "Core"; "equate_by_smt"]
| _ -> false
(*
When comparing t0 =?= t1, if they are not syntactically equal, we
have to decide whether or not we should fire an SMT query to compare
them for provable equality.
The criterion is as follows:
1. We allow an SMT query if either t0 or t1 is "equational". For now, that means
that either is a match expression.
2. Otherwise, if they are both applications of `f v0...vn` and `f u0...un`
of the same head symbol `f`, a top-level constant, then we check if the
type of `f` decorates any of its binders with the `smt_fallback` attribute.
- If none of them are marked as such,
then we check if `f v0...` is syntactically equal to `f u0...`
and allow an SMT query to check if vn = vm. That is, the default behavior
for predicates is that they *last* argument is eligible for SMT equality.
- Otherwise, for each binder that is NOT marked as `smt_fallback`, we check
if the corresponding argument is syntactically equal. If so, we allow
t0 and t1 to be compared for SMT equality.
For example, Steel.ST.Reference.pts_to is defined like so:
/// For instance, [pts_to r (sum_perm (half_perm p) (half_perm p)) (v + 1)]
/// is unifiable with [pts_to r p (1 + v)]
val pts_to (#a:Type0)
(r:ref a)
([@@@smt_fallback] p:perm)
([@@@smt_fallback] v:a)
: vprop
*)
let eligible_for_smt_equality (g:env) (t0 t1:term)
: T.Tac bool
= let either_equational () = equational t0 || equational t1 in
let head_eq (t0 t1:R.term) =
match R.inspect_ln t0, R.inspect_ln t1 with
| R.Tv_App h0 _, R.Tv_App h1 _ ->
TermEq.term_eq h0 h1
| _ -> false
in
match t0.t, t1.t with
| Tm_FStar t0, Tm_FStar t1 -> (
let h0, args0 = R.collect_app_ln t0 in
let h1, args1 = R.collect_app_ln t1 in
if TermEq.term_eq h0 h1 && L.length args0 = L.length args1
then (
match R.inspect_ln h0 with
| R.Tv_FVar fv
| R.Tv_UInst fv _ -> (
match type_of_fv g fv with
| None -> either_equational()
| Some t ->
let bs, _ = R.collect_arr_ln_bs t in
let is_smt_fallback (b:R.binder) =
let bview = R.inspect_binder b in
L.existsb is_smt_fallback bview.attrs
in
let some_fallbacks, fallbacks =
L.fold_right
(fun b (some_fallbacks, bs) ->
if is_smt_fallback b
then true, true::bs
else some_fallbacks, false::bs)
bs (false, [])
in
if not some_fallbacks
then (
//if none of the binders are marked fallback
//then, by default, consider only the last argument as
//fallback
head_eq t0 t1
)
else (
let rec aux args0 args1 fallbacks =
match args0, args1, fallbacks with
| (a0, _)::args0, (a1, _)::args1, b::fallbacks ->
if b
then aux args0 args1 fallbacks
else if not (TermEq.term_eq a0 a1)
then false
else aux args0 args1 fallbacks
| [], [], [] -> true
| _ -> either_equational() //unequal lengths
in
aux args0 args1 fallbacks
)
)
| _ -> either_equational ()
)
else either_equational ()
)
| Tm_ForallSL _ _ _, Tm_ForallSL _ _ _ -> true
| _ -> either_equational ()
let refl_uvar (t:R.term) (uvs:env) : option var =
let open R in
match inspect_ln t with
| Tv_Var v ->
let {uniq=n} = inspect_namedv v in
if contains uvs n then Some n else None
| _ -> None
let is_uvar (t:term) (uvs:env) : option var =
match t.t with
| Tm_FStar t -> refl_uvar t uvs
| _ -> None
let contains_uvar (t:term) (uvs:env) (g:env) : T.Tac bool =
not (check_disjoint uvs (freevars t))
let is_reveal_uvar (t:term) (uvs:env) : option (universe & term & var) =
match is_pure_app t with
| Some (hd, None, arg) ->
(match is_pure_app hd with
| Some (hd, Some Implicit, ty) ->
let arg_uvar_index_opt = is_uvar arg uvs in
(match arg_uvar_index_opt with
| Some n ->
(match is_fvar hd with
| Some (l, [u]) ->
if l = RUtil.reveal_lid
then Some (u, ty, n)
else None
| _ -> None)
| _ -> None)
| _ -> None)
| _ -> None
let is_reveal (t:term) : bool =
match leftmost_head t with
| Some hd ->
(match is_fvar hd with
| Some (l, [_]) -> l = RUtil.reveal_lid
| _ -> false)
| _ -> false
module RT = FStar.Reflection.Typing
//
// Call into the F* unifier to solve for uvs by unifying p and q
//
let try_solve_uvars (g:env) (uvs:env { disjoint uvs g }) (p q:term)
: T.Tac (ss:PS.ss_t { PS.dom ss `Set.subset` freevars q }) =
let uvs = uvs
|> bindings_with_ppname
|> L.rev
|> L.map (fun (({name}, x, t):(ppname & _ & _)) ->
let nv_view = {
R.uniq = x;
R.sort = elab_term t;
R.ppname = name;
} in
let nv = R.pack_namedv nv_view in
nv, elab_term t
) in
let l, issues = RU.with_context (get_context g) (fun _ ->
T.try_unify (elab_env g) uvs (elab_term p) (elab_term q))
in
T.log_issues issues;
// build ss
let ss = PS.empty in
assume (PS.dom ss `Set.subset` freevars q);
match l with
| None -> ss
| Some l ->
let q_names = freevars q in
L.fold_left (fun (ss:(ss:PS.ss_t { PS.dom ss `Set.subset` freevars q })) (x, t) ->
let nv_view = R.inspect_namedv x in
let topt = readback_ty t in
match topt with
| Some t ->
if Set.mem nv_view.uniq q_names &&
not (Set.mem nv_view.uniq (PS.dom ss))
then begin
let ss_new = PS.push ss nv_view.uniq t in
assert (nv_view.uniq `Set.mem` freevars q);
assert (PS.dom ss `Set.subset` freevars q);
assume (PS.dom ss_new `Set.subset` freevars q);
ss_new
end
else ss
| None -> ss
) ss l | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Util.fsti.checked",
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Printer.fsti.checked",
"Pulse.Syntax.fst.checked",
"Pulse.RuntimeUtils.fsti.checked",
"Pulse.Reflection.Util.fst.checked",
"Pulse.Readback.fsti.checked",
"Pulse.Checker.VPropEquiv.fsti.checked",
"Pulse.Checker.Prover.Util.fsti.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Prover.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.TermEq.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Match.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "Pulse.Syntax.Printer",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Pulse.Reflection.Util",
"short_module": "RUtil"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2.TermEq",
"short_module": "TermEq"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Metatheory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
g: Pulse.Typing.Env.env ->
uvs: Pulse.Typing.Env.env{Pulse.Typing.Env.disjoint uvs g} ->
p: Pulse.Syntax.Base.term ->
q: Pulse.Syntax.Base.term
-> FStar.Tactics.Effect.Tac
(Prims.dtuple2 (ss:
Pulse.Checker.Prover.Substs.ss_t
{FStar.Set.subset (Pulse.Checker.Prover.Substs.dom ss) (Pulse.Syntax.Naming.freevars q)}
)
(fun ss ->
FStar.Pervasives.Native.option (FStar.Reflection.Typing.equiv (Pulse.Typing.elab_env g)
(Pulse.Elaborate.Pure.elab_term p)
(Pulse.Elaborate.Pure.elab_term ss.(q))))) | FStar.Tactics.Effect.Tac | [] | [] | [
"Pulse.Typing.Env.env",
"Pulse.Typing.Env.disjoint",
"Pulse.Syntax.Base.term",
"Prims.Mkdtuple2",
"Pulse.Checker.Prover.Substs.ss_t",
"FStar.Set.subset",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.dom",
"Pulse.Syntax.Naming.freevars",
"FStar.Pervasives.Native.option",
"FStar.Reflection.Typing.equiv",
"Pulse.Typing.elab_env",
"Pulse.Elaborate.Pure.elab_term",
"Pulse.Checker.Prover.Base.op_Array_Access",
"FStar.Pervasives.Native.None",
"Prims.dtuple2",
"Prims.eq2",
"FStar.Stubs.Reflection.Types.term",
"Pulse.Syntax.Base.eq_tm",
"FStar.Pervasives.Native.Some",
"FStar.Reflection.Typing.Rel_refl",
"FStar.Reflection.Typing.R_Eq",
"Prims.bool",
"FStar.Stubs.Tactics.Types.equiv_token",
"FStar.Stubs.Tactics.Types.issues",
"FStar.Reflection.Typing.Rel_eq_token",
"FStar.Squash.return_squash",
"FStar.Pervasives.Native.tuple2",
"Pulse.Typing.Util.check_equiv_now",
"Pulse.Checker.Prover.Match.eligible_for_smt_equality",
"Pulse.Checker.Prover.Match.contains_uvar",
"Pulse.Readback.readback_ty",
"Pulse.Checker.Prover.Match.try_solve_uvars"
] | [] | false | true | false | false | false | let unify (g: env) (uvs: env{disjoint uvs g}) (p q: term)
: T.Tac
(ss: PS.ss_t{(PS.dom ss) `Set.subset` (freevars q)} &
option (RT.equiv (elab_env g) (elab_term p) (elab_term ss.(q)))) =
| let ss = try_solve_uvars g uvs p q in
let q_ss = readback_ty (elab_term ss.(q)) in
match q_ss with
| None -> (| ss, None |)
| Some q ->
if eq_tm p q
then (| ss, Some (RT.Rel_refl _ _ _) |)
else
if contains_uvar q uvs g
then (| ss, None |)
else
if eligible_for_smt_equality g p q
then
let v0 = elab_term p in
let v1 = elab_term q in
match check_equiv_now (elab_env g) v0 v1 with
| Some token, _ -> (| ss, Some (RT.Rel_eq_token _ _ _ (FStar.Squash.return_squash token)) |)
| None, _ -> (| ss, None |)
else (| ss, None |) | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.pa | val pa : x: Type -> a: (_: Type -> Type0) -> Prims.logical | let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x) | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 20,
"end_line": 19,
"start_col": 0,
"start_line": 18
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Type -> a: (_: Type -> Type0) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.l_and",
"Prims.eq2",
"InjectiveTypeFormers.Explicit.i",
"Prims.l_not",
"Prims.logical"
] | [] | false | false | false | true | true | let pa (x: Type u#1) (a: (Type u#1 -> Type u#0)) =
| i a == x /\ ~(a x) | false |
|
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.coerce | val coerce (x: 'a{'a == 'b}) : 'b | val coerce (x: 'a{'a == 'b}) : 'b | let coerce (x:'a{'a == 'b}) : 'b = x | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 3,
"start_col": 0,
"start_line": 3
} | module InjectiveTypeFormers.Explicit | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: 'a{'a == 'b} -> 'b | Prims.Tot | [
"total"
] | [] | [
"Prims.eq2"
] | [] | false | false | false | false | false | let coerce (x: 'a{'a == 'b}) : 'b =
| x | false |
Hacl.Impl.Ed25519.Group.fst | Hacl.Impl.Ed25519.Group.mk_ed25519_concrete_ops | val mk_ed25519_concrete_ops:BE.concrete_ops U64 20ul 0ul | val mk_ed25519_concrete_ops:BE.concrete_ops U64 20ul 0ul | let mk_ed25519_concrete_ops : BE.concrete_ops U64 20ul 0ul = {
BE.to = mk_to_ed25519_comm_monoid;
BE.lone = point_zero;
BE.lmul = point_add;
BE.lsqr = point_double;
} | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Group.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 1,
"end_line": 68,
"start_col": 0,
"start_line": 63
} | module Hacl.Impl.Ed25519.Group
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
open Hacl.Impl.Ed25519.PointConstants
module LSeq = Lib.Sequence
module F51 = Hacl.Impl.Ed25519.Field51
module BE = Hacl.Impl.Exponentiation.Definitions
module S = Spec.Ed25519
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
unfold
let a_spec = S.aff_point_c
unfold
let refl (a:LSeq.lseq uint64 20{F51.linv a}) : GTot a_spec =
S.to_aff_point (F51.refl_ext_point a)
unfold
let linv_ctx (a:LSeq.lseq uint64 0) : Type0 = True
inline_for_extraction noextract
let mk_to_ed25519_comm_monoid : BE.to_comm_monoid U64 20ul 0ul = {
BE.a_spec = a_spec;
BE.comm_monoid = S.mk_ed25519_comm_monoid;
BE.linv_ctx = linv_ctx;
BE.linv = F51.linv;
BE.refl = refl;
}
inline_for_extraction noextract
val point_add : BE.lmul_st U64 20ul 0ul mk_to_ed25519_comm_monoid
let point_add ctx x y xy =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_add_lemma
(F51.refl_ext_point (as_seq h0 x)) (F51.refl_ext_point (as_seq h0 y));
Hacl.Impl.Ed25519.PointAdd.point_add xy x y
inline_for_extraction noextract
val point_double : BE.lsqr_st U64 20ul 0ul mk_to_ed25519_comm_monoid
let point_double ctx x xx =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_double_lemma (F51.refl_ext_point (as_seq h0 x));
Hacl.Impl.Ed25519.PointDouble.point_double xx x
inline_for_extraction noextract
val point_zero : BE.lone_st U64 20ul 0ul mk_to_ed25519_comm_monoid
let point_zero ctx one = make_point_inf one | {
"checked_file": "/",
"dependencies": [
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.Definitions.fst.checked",
"Hacl.Impl.Ed25519.PointDouble.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.PointAdd.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Ed25519.Group.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Hacl.Impl.Exponentiation.Definitions.concrete_ops Lib.IntTypes.U64
(20ul <: FStar.UInt32.t)
(0ul <: FStar.UInt32.t) | Prims.Tot | [
"total"
] | [] | [
"Hacl.Impl.Exponentiation.Definitions.Mkconcrete_ops",
"Lib.IntTypes.U64",
"FStar.UInt32.uint_to_t",
"FStar.Ghost.hide",
"Hacl.Impl.Exponentiation.Definitions.to_comm_monoid",
"Hacl.Impl.Ed25519.Group.mk_to_ed25519_comm_monoid",
"Hacl.Impl.Ed25519.Group.point_zero",
"Hacl.Impl.Ed25519.Group.point_add",
"Hacl.Impl.Ed25519.Group.point_double"
] | [] | false | false | false | false | false | let mk_ed25519_concrete_ops:BE.concrete_ops U64 20ul 0ul =
| {
BE.to = mk_to_ed25519_comm_monoid;
BE.lone = point_zero;
BE.lmul = point_add;
BE.lsqr = point_double
} | false |
Hacl.Impl.Ed25519.Group.fst | Hacl.Impl.Ed25519.Group.refl | val refl (a: LSeq.lseq uint64 20 {F51.linv a}) : GTot a_spec | val refl (a: LSeq.lseq uint64 20 {F51.linv a}) : GTot a_spec | let refl (a:LSeq.lseq uint64 20{F51.linv a}) : GTot a_spec =
S.to_aff_point (F51.refl_ext_point a) | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Group.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 25,
"start_col": 0,
"start_line": 24
} | module Hacl.Impl.Ed25519.Group
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
open Hacl.Impl.Ed25519.PointConstants
module LSeq = Lib.Sequence
module F51 = Hacl.Impl.Ed25519.Field51
module BE = Hacl.Impl.Exponentiation.Definitions
module S = Spec.Ed25519
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
unfold
let a_spec = S.aff_point_c | {
"checked_file": "/",
"dependencies": [
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.Definitions.fst.checked",
"Hacl.Impl.Ed25519.PointDouble.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.PointAdd.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Ed25519.Group.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Lib.Sequence.lseq Lib.IntTypes.uint64 20 {Hacl.Impl.Ed25519.Field51.linv a}
-> Prims.GTot Hacl.Impl.Ed25519.Group.a_spec | Prims.GTot | [
"sometrivial"
] | [] | [
"Lib.Sequence.lseq",
"Lib.IntTypes.uint64",
"Hacl.Impl.Ed25519.Field51.linv",
"Spec.Ed25519.PointOps.to_aff_point",
"Hacl.Impl.Ed25519.Field51.refl_ext_point",
"Hacl.Impl.Ed25519.Group.a_spec"
] | [] | false | false | false | false | false | let refl (a: LSeq.lseq uint64 20 {F51.linv a}) : GTot a_spec =
| S.to_aff_point (F51.refl_ext_point a) | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.p | val p (x: Type u#1) : Type u#0 | val p (x: Type u#1) : Type u#0 | let p (x : Type u#1) : Type u#0 =
exists a. pa x a | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 18,
"end_line": 22,
"start_col": 0,
"start_line": 21
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Type -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.l_Exists",
"InjectiveTypeFormers.Explicit.pa"
] | [] | false | false | false | true | true | let p (x: Type u#1) : Type u#0 =
| exists a. pa x a | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.w | val w:i p | val w:i p | let w : i p = Mkinj | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 19,
"end_line": 24,
"start_col": 0,
"start_line": 24
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | InjectiveTypeFormers.Explicit.i InjectiveTypeFormers.Explicit.p | Prims.Tot | [
"total"
] | [] | [
"InjectiveTypeFormers.Explicit.Mkinj",
"InjectiveTypeFormers.Explicit.p"
] | [] | false | false | false | true | false | let w:i p =
| Mkinj | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.q | val q : Type | let q = i p | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 11,
"end_line": 26,
"start_col": 0,
"start_line": 26
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a
let w : i p = Mkinj | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type | Prims.Tot | [
"total"
] | [] | [
"InjectiveTypeFormers.Explicit.i",
"InjectiveTypeFormers.Explicit.p"
] | [] | false | false | false | true | true | let q =
| i p | false |
|
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.false_of_pq_squash | val false_of_pq_squash (pq: p q) : GTot False | val false_of_pq_squash (pq: p q) : GTot False | let false_of_pq_squash (pq: p q) : GTot False =
false_of_pq pq;
coerce (FStar.Squash.return_squash #Prims.empty (match () with)) | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 66,
"end_line": 43,
"start_col": 0,
"start_line": 41
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a
let w : i p = Mkinj
let _ = intro_ambient w
let q = i p
let _ = intro_ambient q
val false_of_pq : p q -> Lemma False
#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true"
let false_of_pq pq =
FStar.Classical.(
exists_elim
Prims.empty
(give_witness pq)
(fun (a:(Type u#1 -> Type u#0){i a == q /\ ~(a q)}) ->
isInj_admit p a w))
#pop-options | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | pq: InjectiveTypeFormers.Explicit.p InjectiveTypeFormers.Explicit.q -> Prims.GTot Prims.l_False | Prims.GTot | [
"sometrivial"
] | [] | [
"InjectiveTypeFormers.Explicit.p",
"InjectiveTypeFormers.Explicit.q",
"InjectiveTypeFormers.Explicit.coerce",
"Prims.squash",
"Prims.empty",
"Prims.l_False",
"FStar.Squash.return_squash",
"Prims.unit",
"InjectiveTypeFormers.Explicit.false_of_pq"
] | [] | false | false | false | false | false | let false_of_pq_squash (pq: p q) : GTot False =
| false_of_pq pq;
coerce (FStar.Squash.return_squash #Prims.empty (match () with )) | false |
Hacl.Impl.Ed25519.Group.fst | Hacl.Impl.Ed25519.Group.mk_to_ed25519_comm_monoid | val mk_to_ed25519_comm_monoid:BE.to_comm_monoid U64 20ul 0ul | val mk_to_ed25519_comm_monoid:BE.to_comm_monoid U64 20ul 0ul | let mk_to_ed25519_comm_monoid : BE.to_comm_monoid U64 20ul 0ul = {
BE.a_spec = a_spec;
BE.comm_monoid = S.mk_ed25519_comm_monoid;
BE.linv_ctx = linv_ctx;
BE.linv = F51.linv;
BE.refl = refl;
} | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Group.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 1,
"end_line": 37,
"start_col": 0,
"start_line": 31
} | module Hacl.Impl.Ed25519.Group
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
open Hacl.Impl.Ed25519.PointConstants
module LSeq = Lib.Sequence
module F51 = Hacl.Impl.Ed25519.Field51
module BE = Hacl.Impl.Exponentiation.Definitions
module S = Spec.Ed25519
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
unfold
let a_spec = S.aff_point_c
unfold
let refl (a:LSeq.lseq uint64 20{F51.linv a}) : GTot a_spec =
S.to_aff_point (F51.refl_ext_point a)
unfold
let linv_ctx (a:LSeq.lseq uint64 0) : Type0 = True | {
"checked_file": "/",
"dependencies": [
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.Definitions.fst.checked",
"Hacl.Impl.Ed25519.PointDouble.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.PointAdd.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Ed25519.Group.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Hacl.Impl.Exponentiation.Definitions.to_comm_monoid Lib.IntTypes.U64
(20ul <: FStar.UInt32.t)
(0ul <: FStar.UInt32.t) | Prims.Tot | [
"total"
] | [] | [
"Hacl.Impl.Exponentiation.Definitions.Mkto_comm_monoid",
"Lib.IntTypes.U64",
"FStar.UInt32.uint_to_t",
"Hacl.Impl.Ed25519.Group.a_spec",
"Spec.Ed25519.mk_ed25519_comm_monoid",
"Hacl.Impl.Ed25519.Group.linv_ctx",
"Hacl.Impl.Ed25519.Field51.linv",
"Hacl.Impl.Ed25519.Group.refl"
] | [] | false | false | false | false | false | let mk_to_ed25519_comm_monoid:BE.to_comm_monoid U64 20ul 0ul =
| {
BE.a_spec = a_spec;
BE.comm_monoid = S.mk_ed25519_comm_monoid;
BE.linv_ctx = linv_ctx;
BE.linv = F51.linv;
BE.refl = refl
} | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.falso | val falso: Prims.unit -> Lemma False | val falso: Prims.unit -> Lemma False | let falso () : Lemma False = false_of_pq pq | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 43,
"end_line": 56,
"start_col": 0,
"start_line": 56
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a
let w : i p = Mkinj
let _ = intro_ambient w
let q = i p
let _ = intro_ambient q
val false_of_pq : p q -> Lemma False
#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true"
let false_of_pq pq =
FStar.Classical.(
exists_elim
Prims.empty
(give_witness pq)
(fun (a:(Type u#1 -> Type u#0){i a == q /\ ~(a q)}) ->
isInj_admit p a w))
#pop-options
let false_of_pq_squash (pq: p q) : GTot False =
false_of_pq pq;
coerce (FStar.Squash.return_squash #Prims.empty (match () with))
let not_pq : ~ (p q) =
FStar.Classical.give_witness
#(p q -> GTot False) false_of_pq_squash
let _ = intro_ambient not_pq
let pq : p q =
FStar.Classical.(
lemma_of_squash (not_pq);
exists_intro (pa q) p) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> FStar.Pervasives.Lemma (ensures Prims.l_False) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.unit",
"InjectiveTypeFormers.Explicit.false_of_pq",
"InjectiveTypeFormers.Explicit.pq",
"Prims.l_True",
"Prims.squash",
"Prims.l_False",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let falso () : Lemma False =
| false_of_pq pq | false |
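(* Hedged usage sketch (illustrative only; `ex_falso_anything` is a hypothetical
   name, not part of InjectiveTypeFormers.Explicit, and the snippet is not
   re-checked here): once `falso` derives `False`, any proposition follows. *)
let ex_falso_anything (p: prop) : Lemma p = falso ()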
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.pq | val pq:p q | val pq:p q | let pq : p q =
FStar.Classical.(
lemma_of_squash (not_pq);
exists_intro (pa q) p) | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 26,
"end_line": 54,
"start_col": 0,
"start_line": 51
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a
let w : i p = Mkinj
let _ = intro_ambient w
let q = i p
let _ = intro_ambient q
val false_of_pq : p q -> Lemma False
#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true"
let false_of_pq pq =
FStar.Classical.(
exists_elim
Prims.empty
(give_witness pq)
(fun (a:(Type u#1 -> Type u#0){i a == q /\ ~(a q)}) ->
isInj_admit p a w))
#pop-options
let false_of_pq_squash (pq: p q) : GTot False =
false_of_pq pq;
coerce (FStar.Squash.return_squash #Prims.empty (match () with))
let not_pq : ~ (p q) =
FStar.Classical.give_witness
#(p q -> GTot False) false_of_pq_squash
let _ = intro_ambient not_pq | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | InjectiveTypeFormers.Explicit.p InjectiveTypeFormers.Explicit.q | Prims.Tot | [
"total"
] | [] | [
"FStar.Classical.exists_intro",
"InjectiveTypeFormers.Explicit.pa",
"InjectiveTypeFormers.Explicit.q",
"InjectiveTypeFormers.Explicit.p",
"Prims.unit",
"InjectiveTypeFormers.Explicit.lemma_of_squash",
"Prims.l_False",
"InjectiveTypeFormers.Explicit.not_pq"
] | [] | false | false | false | true | false | let pq:p q =
| let open FStar.Classical in
lemma_of_squash (not_pq);
exists_intro (pa q) p | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.not_pq | val not_pq:~(p q) | val not_pq:~(p q) | let not_pq : ~ (p q) =
FStar.Classical.give_witness
#(p q -> GTot False) false_of_pq_squash | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 43,
"end_line": 47,
"start_col": 0,
"start_line": 45
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a
let w : i p = Mkinj
let _ = intro_ambient w
let q = i p
let _ = intro_ambient q
val false_of_pq : p q -> Lemma False
#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true"
let false_of_pq pq =
FStar.Classical.(
exists_elim
Prims.empty
(give_witness pq)
(fun (a:(Type u#1 -> Type u#0){i a == q /\ ~(a q)}) ->
isInj_admit p a w))
#pop-options
let false_of_pq_squash (pq: p q) : GTot False =
false_of_pq pq;
coerce (FStar.Squash.return_squash #Prims.empty (match () with)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ~(InjectiveTypeFormers.Explicit.p InjectiveTypeFormers.Explicit.q) | Prims.Tot | [
"total"
] | [] | [
"FStar.Classical.give_witness",
"InjectiveTypeFormers.Explicit.p",
"InjectiveTypeFormers.Explicit.q",
"Prims.l_False",
"InjectiveTypeFormers.Explicit.false_of_pq_squash"
] | [] | false | false | false | true | false | let not_pq:~(p q) =
| FStar.Classical.give_witness #(p q -> GTot False) false_of_pq_squash | false |
InjectiveTypeFormers.Explicit.fst | InjectiveTypeFormers.Explicit.false_of_pq | val false_of_pq : p q -> Lemma False | val false_of_pq : p q -> Lemma False | let false_of_pq pq =
FStar.Classical.(
exists_elim
Prims.empty
(give_witness pq)
(fun (a:(Type u#1 -> Type u#0){i a == q /\ ~(a q)}) ->
isInj_admit p a w)) | {
"file_name": "examples/paradoxes/InjectiveTypeFormers.Explicit.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 27,
"end_line": 38,
"start_col": 0,
"start_line": 32
} | module InjectiveTypeFormers.Explicit
let coerce (x:'a{'a == 'b}) : 'b = x
let lemma_of_squash (x:squash 'a) : Lemma 'a = ()
type i (f : Type u#1 -> Type u#0) : Type u#1 =
| Mkinj : i f
[@@(expect_failure [19])]
let isInj (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
= ()
assume
val isInj_admit (x:_) (y:_) (w:i x)
: Lemma (i x == i y ==> x == y)
let pa (x:Type u#1) (a:(Type u#1 -> Type u#0)) =
i a == x /\ ~(a x)
let p (x : Type u#1) : Type u#0 =
exists a. pa x a
let w : i p = Mkinj
let _ = intro_ambient w
let q = i p
let _ = intro_ambient q
val false_of_pq : p q -> Lemma False | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "InjectiveTypeFormers.Explicit.fst"
} | [
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "InjectiveTypeFormers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": true,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | pq: InjectiveTypeFormers.Explicit.p InjectiveTypeFormers.Explicit.q
-> FStar.Pervasives.Lemma (ensures Prims.l_False) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"InjectiveTypeFormers.Explicit.p",
"InjectiveTypeFormers.Explicit.q",
"FStar.Classical.exists_elim",
"Prims.empty",
"Prims.l_and",
"Prims.eq2",
"InjectiveTypeFormers.Explicit.i",
"Prims.l_not",
"FStar.Classical.give_witness",
"InjectiveTypeFormers.Explicit.isInj_admit",
"InjectiveTypeFormers.Explicit.w",
"Prims.squash",
"Prims.unit"
] | [] | false | false | true | false | false | let false_of_pq pq =
| let open FStar.Classical in
exists_elim Prims.empty
(give_witness pq)
(fun (a: (Type u#1 -> Type u#0){i a == q /\ ~(a q)}) -> isInj_admit p a w) | false |
CQueue.Cell.fst | CQueue.Cell.alloc_cell | val alloc_cell
(#a: Type0)
(data: a)
(next: ccell_ptrvalue a)
: Steel (ccell_lvalue a)
emp
(fun res -> ccell res)
(requires (fun _ -> True))
(ensures (fun _ res h' ->
h' (ccell res) == ({ vcell_data = data; vcell_next = next; })
)) | val alloc_cell
(#a: Type0)
(data: a)
(next: ccell_ptrvalue a)
: Steel (ccell_lvalue a)
emp
(fun res -> ccell res)
(requires (fun _ -> True))
(ensures (fun _ res h' ->
h' (ccell res) == ({ vcell_data = data; vcell_next = next; })
)) | let alloc_cell
#a data next
=
let rdata = ralloc data in
let rnext = ralloc next in
let res : ccell_lvalue a = ({ data = rdata; next = rnext; all_or_none_null = () }) in
change_equal_slprop (vptr rdata) (vptr (ccell_data res));
change_equal_slprop (vptr rnext) (vptr (ccell_next res));
intro_ccell res;
return res | {
"file_name": "share/steel/examples/steel/CQueue.Cell.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 12,
"end_line": 202,
"start_col": 0,
"start_line": 193
} | module CQueue.Cell
(* A Steel model of C cell structs *)
#push-options "--__no_positivity"
noeq
type mcell (a: Type0) = {
data: ref a;
next: ref (mcell a);
all_or_none_null: squash (is_null data == is_null next); // TODO: /\ freeable data /\ freeable next, if freeable is implemented as a pure space proposition rather than as stateful permissions (i.e. "freeable if you have the whole permission")
}
#pop-options
let ccell_ptrvalue a = mcell a
let ccell_ptrvalue_null a = {data = null; next = null; all_or_none_null = ()}
let ccell_ptrvalue_is_null #a x = is_null x.data
let ccell_data #a c =
c.data
let ccell_next #a c =
c.next
let ccell_is_lvalue_refine
(#a: Type)
(c: ccell_ptrvalue a)
(_: t_of emp)
: Tot prop
= ccell_ptrvalue_is_null c == false
let ccell_is_lvalue_rewrite
(#a: Type)
(c: ccell_ptrvalue a)
(_: normal (t_of (emp `vrefine` ccell_is_lvalue_refine c)))
: GTot (ccell_lvalue a)
= c
[@@ __steel_reduce__; __reduce__ ]
let ccell_is_lvalue0
(#a: Type)
(c: ccell_ptrvalue a)
: Tot vprop
= emp `vrefine` ccell_is_lvalue_refine c `vrewrite` ccell_is_lvalue_rewrite c
let ccell_is_lvalue_hp
(#a: Type)
(c: ccell_ptrvalue a)
: Tot (slprop u#1)
= hp_of (ccell_is_lvalue0 c)
let ccell_is_lvalue_sel
(#a: Type)
(c: ccell_ptrvalue a)
: GTot (selector (ccell_lvalue a) (ccell_is_lvalue_hp c))
= sel_of (ccell_is_lvalue0 c)
let intro_ccell_is_lvalue
#_ #a c
=
intro_vrefine emp (ccell_is_lvalue_refine c);
intro_vrewrite (emp `vrefine` ccell_is_lvalue_refine c) (ccell_is_lvalue_rewrite c);
change_slprop_rel
(ccell_is_lvalue0 c)
(ccell_is_lvalue c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell_is_lvalue c) == hp_of (ccell_is_lvalue0 c));
assert_norm (sel_of (ccell_is_lvalue c) m === sel_of (ccell_is_lvalue0 c) m)
)
let elim_ccell_is_lvalue
#_ #a c
=
change_slprop_rel
(ccell_is_lvalue c)
(ccell_is_lvalue0 c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell_is_lvalue c) == hp_of (ccell_is_lvalue0 c));
assert_norm (sel_of (ccell_is_lvalue c) m === sel_of (ccell_is_lvalue0 c) m)
);
elim_vrewrite (emp `vrefine` ccell_is_lvalue_refine c) (ccell_is_lvalue_rewrite c);
elim_vrefine emp (ccell_is_lvalue_refine c)
[@@ __steel_reduce__]
let ccell0 (a: Type0) (c: ccell_lvalue a) : Tot vprop =
(vptr (ccell_data c) `star` vptr (ccell_next c))
// unfold
let ccell_rewrite
(#a: Type0)
(c: ccell_ptrvalue a)
(x: dtuple2 (ccell_lvalue a) (vdep_payload (ccell_is_lvalue c) (ccell0 a)))
: GTot (vcell a)
= let p =
dsnd #(ccell_lvalue a) #(vdep_payload (ccell_is_lvalue c) (ccell0 a)) x
in
{
vcell_data = fst p;
vcell_next = snd p;
}
[@@ __steel_reduce__ ; __reduce__] // to avoid manual unfoldings through change_slprop
let ccell1
(#a: Type0)
(c: ccell_ptrvalue a)
: Tot vprop
= ccell_is_lvalue c `vdep` ccell0 a `vrewrite` ccell_rewrite c
let ccell_hp
#a c
= hp_of (ccell1 c)
let ccell_sel
#a c
= sel_of (ccell1 c)
let intro_ccell
#opened #a c
=
intro_ccell_is_lvalue c;
reveal_star (vptr (ccell_data c)) (vptr (ccell_next c));
intro_vdep
(ccell_is_lvalue c)
(vptr (ccell_data c) `star` vptr (ccell_next c))
(ccell0 a);
intro_vrewrite
(ccell_is_lvalue c `vdep` ccell0 a)
(ccell_rewrite c);
change_slprop_rel
(ccell1 c)
(ccell c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell1 c) == ccell_hp c);
assert_norm (sel_of (ccell1 c) m === sel_of (ccell c) m)
)
let elim_ccell_ghost
#opened #a c
=
change_slprop_rel
(ccell c)
(ccell1 c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell1 c) == ccell_hp c);
assert_norm (sel_of (ccell1 c) m === sel_of (ccell c) m)
);
elim_vrewrite
(ccell_is_lvalue c `vdep` ccell0 a)
(ccell_rewrite c);
let c' : Ghost.erased (ccell_lvalue a) = elim_vdep
(ccell_is_lvalue c)
(ccell0 a)
in
elim_ccell_is_lvalue c;
change_equal_slprop
(ccell0 a c')
(vptr (ccell_data (Ghost.reveal c')) `star` vptr (ccell_next (Ghost.reveal c')));
reveal_star (vptr (ccell_data (Ghost.reveal c'))) (vptr (ccell_next (Ghost.reveal c')));
c'
let elim_ccell
#opened #a c
=
let c2 = elim_ccell_ghost c in
let c : ccell_lvalue a = c in
change_equal_slprop (vptr (ccell_data c2)) (vptr (ccell_data c));
change_equal_slprop (vptr (ccell_next c2)) (vptr (ccell_next c));
return c
let ccell_not_null
#opened #a c
=
let c1 = elim_ccell_ghost c in
let c2 : ccell_lvalue a = c in
change_equal_slprop (vptr (ccell_data c1)) (vptr (ccell_data c2));
change_equal_slprop (vptr (ccell_next c1)) (vptr (ccell_next c2));
intro_ccell c2;
change_equal_slprop (ccell c2) (ccell c);
()
let ralloc (#a:Type0) (x:a) : Steel (ref a)
emp (fun r -> vptr r)
(requires fun _ -> True)
(ensures fun _ r h1 -> h1 (vptr r) == x /\ not (is_null r))
=
malloc x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "CQueue.Cell.fst"
} | [
{
"abbrev": false,
"full_module": "Steel.Reference",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "CQueue",
"short_module": null
},
{
"abbrev": false,
"full_module": "CQueue",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | data: a -> next: CQueue.Cell.ccell_ptrvalue a -> Steel.Effect.Steel (CQueue.Cell.ccell_lvalue a) | Steel.Effect.Steel | [] | [] | [
"CQueue.Cell.ccell_ptrvalue",
"Steel.Effect.Atomic.return",
"CQueue.Cell.ccell_lvalue",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"CQueue.Cell.ccell",
"Steel.Effect.Common.vprop",
"Prims.unit",
"CQueue.Cell.intro_ccell",
"Steel.Effect.Atomic.change_equal_slprop",
"Steel.Reference.vptr",
"CQueue.Cell.mcell",
"CQueue.Cell.ccell_next",
"CQueue.Cell.ccell_data",
"CQueue.Cell.Mkmcell",
"Steel.Reference.ref",
"CQueue.Cell.ralloc"
] | [] | false | true | false | false | false | let alloc_cell #a data next =
| let rdata = ralloc data in
let rnext = ralloc next in
let res:ccell_lvalue a = ({ data = rdata; next = rnext; all_or_none_null = () }) in
change_equal_slprop (vptr rdata) (vptr (ccell_data res));
change_equal_slprop (vptr rnext) (vptr (ccell_next res));
intro_ccell res;
return res | false |
Pulse.Lib.Reference.fsti | Pulse.Lib.Reference.cond | val cond : b: Prims.bool -> p: Pulse.Lib.Core.vprop -> q: Pulse.Lib.Core.vprop -> Pulse.Lib.Core.vprop | let cond b (p q:vprop) = if b then p else q | {
"file_name": "share/steel/examples/pulse/lib/Pulse.Lib.Reference.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 43,
"end_line": 84,
"start_col": 0,
"start_line": 84
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Lib.Reference
open FStar.Tactics
open PulseCore.Observability
open Pulse.Lib.Core
open PulseCore.FractionalPermission
open FStar.Ghost
module U32 = FStar.UInt32
val ref ([@@@unused] a:Type u#0) : Type u#0
val pts_to
(#a:Type) (r:ref a)
(#[exact (`full_perm)] [@@@equate_by_smt] p:perm)
([@@@equate_by_smt] n:a)
: vprop
[@@deprecated "Reference.alloc is unsound; use Box.alloc instead"]
val alloc (#a:Type) (x:a)
: stt (ref a) emp (fun r -> pts_to r x)
val ( ! ) (#a:Type) (r:ref a) (#n:erased a) (#p:perm)
: stt a
(pts_to r #p n)
(fun x -> pts_to r #p n ** pure (reveal n == x))
val ( := ) (#a:Type) (r:ref a) (x:a) (#n:erased a)
: stt unit
(pts_to r n)
(fun _ -> pts_to r (hide x))
[@@deprecated "Reference.free is unsound; use Box.free instead"]
val free (#a:Type) (r:ref a) (#n:erased a)
: stt unit (pts_to r n) (fun _ -> emp)
val share (#a:Type) (r:ref a) (#v:erased a) (#p:perm)
: stt_ghost unit
(pts_to r #p v)
(fun _ ->
pts_to r #(half_perm p) v **
pts_to r #(half_perm p) v)
val gather (#a:Type) (r:ref a) (#x0 #x1:erased a) (#p0 #p1:perm)
: stt_ghost unit
(pts_to r #p0 x0 ** pts_to r #p1 x1)
(fun _ -> pts_to r #(sum_perm p0 p1) x0 ** pure (x0 == x1))
(* Share/gather specialized to half permission *)
val share2 (#a:Type) (r:ref a) (#v:erased a)
: stt_ghost unit
(pts_to r v)
(fun _ -> pts_to r #one_half v ** pts_to r #one_half v)
val gather2 (#a:Type) (r:ref a) (#x0 #x1:erased a)
: stt_ghost unit
(pts_to r #one_half x0 ** pts_to r #one_half x1)
(fun _ -> pts_to r x0 ** pure (x0 == x1))
val read_atomic (r:ref U32.t) (#n:erased U32.t) (#p:perm)
: stt_atomic U32.t #Observable emp_inames
(pts_to r #p n)
(fun x -> pts_to r #p n ** pure (reveal n == x))
val write_atomic (r:ref U32.t) (x:U32.t) (#n:erased U32.t)
: stt_atomic unit #Observable emp_inames
(pts_to r n)
(fun _ -> pts_to r (hide x)) | {
"checked_file": "/",
"dependencies": [
"PulseCore.Observability.fst.checked",
"PulseCore.FractionalPermission.fst.checked",
"Pulse.Lib.Core.fsti.checked",
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Lib.Reference.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "PulseCore.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib.Core",
"short_module": null
},
{
"abbrev": false,
"full_module": "PulseCore.Observability",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Prims.bool -> p: Pulse.Lib.Core.vprop -> q: Pulse.Lib.Core.vprop -> Pulse.Lib.Core.vprop | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Pulse.Lib.Core.vprop"
] | [] | false | false | false | true | false | let cond b (p: vprop) (q: vprop) =
| if b then p else q | false |
|
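(* Hedged usage sketch (illustrative only; `maybe_p` is not part of
   Pulse.Lib.Reference): `cond` simply branches between two vprops, so an
   optional assertion can be packaged against `emp` from Pulse.Lib.Core. *)
let maybe_p (b: bool) (p: vprop) : vprop = cond b p emp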
Hacl.Impl.Ed25519.Group.fst | Hacl.Impl.Ed25519.Group.point_zero | val point_zero : BE.lone_st U64 20ul 0ul mk_to_ed25519_comm_monoid | val point_zero : BE.lone_st U64 20ul 0ul mk_to_ed25519_comm_monoid | let point_zero ctx one = make_point_inf one | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Group.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 43,
"end_line": 59,
"start_col": 0,
"start_line": 59
} | module Hacl.Impl.Ed25519.Group
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
open Hacl.Impl.Ed25519.PointConstants
module LSeq = Lib.Sequence
module F51 = Hacl.Impl.Ed25519.Field51
module BE = Hacl.Impl.Exponentiation.Definitions
module S = Spec.Ed25519
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
unfold
let a_spec = S.aff_point_c
unfold
let refl (a:LSeq.lseq uint64 20{F51.linv a}) : GTot a_spec =
S.to_aff_point (F51.refl_ext_point a)
unfold
let linv_ctx (a:LSeq.lseq uint64 0) : Type0 = True
inline_for_extraction noextract
let mk_to_ed25519_comm_monoid : BE.to_comm_monoid U64 20ul 0ul = {
BE.a_spec = a_spec;
BE.comm_monoid = S.mk_ed25519_comm_monoid;
BE.linv_ctx = linv_ctx;
BE.linv = F51.linv;
BE.refl = refl;
}
inline_for_extraction noextract
val point_add : BE.lmul_st U64 20ul 0ul mk_to_ed25519_comm_monoid
let point_add ctx x y xy =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_add_lemma
(F51.refl_ext_point (as_seq h0 x)) (F51.refl_ext_point (as_seq h0 y));
Hacl.Impl.Ed25519.PointAdd.point_add xy x y
inline_for_extraction noextract
val point_double : BE.lsqr_st U64 20ul 0ul mk_to_ed25519_comm_monoid
let point_double ctx x xx =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_double_lemma (F51.refl_ext_point (as_seq h0 x));
Hacl.Impl.Ed25519.PointDouble.point_double xx x
inline_for_extraction noextract | {
"checked_file": "/",
"dependencies": [
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.Definitions.fst.checked",
"Hacl.Impl.Ed25519.PointDouble.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.PointAdd.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Ed25519.Group.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Hacl.Impl.Exponentiation.Definitions.lone_st Lib.IntTypes.U64
20ul
0ul
Hacl.Impl.Ed25519.Group.mk_to_ed25519_comm_monoid | Prims.Tot | [
"total"
] | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Ed25519.PointConstants.make_point_inf",
"Prims.unit"
] | [] | false | false | false | false | false | let point_zero ctx one =
| make_point_inf one | false |
Hacl.Impl.Ed25519.Group.fst | Hacl.Impl.Ed25519.Group.point_double | val point_double : BE.lsqr_st U64 20ul 0ul mk_to_ed25519_comm_monoid | val point_double : BE.lsqr_st U64 20ul 0ul mk_to_ed25519_comm_monoid | let point_double ctx x xx =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_double_lemma (F51.refl_ext_point (as_seq h0 x));
Hacl.Impl.Ed25519.PointDouble.point_double xx x | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Group.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 49,
"end_line": 54,
"start_col": 0,
"start_line": 51
} | module Hacl.Impl.Ed25519.Group
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
open Hacl.Impl.Ed25519.PointConstants
module LSeq = Lib.Sequence
module F51 = Hacl.Impl.Ed25519.Field51
module BE = Hacl.Impl.Exponentiation.Definitions
module S = Spec.Ed25519
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
unfold
let a_spec = S.aff_point_c
unfold
let refl (a:LSeq.lseq uint64 20{F51.linv a}) : GTot a_spec =
S.to_aff_point (F51.refl_ext_point a)
unfold
let linv_ctx (a:LSeq.lseq uint64 0) : Type0 = True
inline_for_extraction noextract
let mk_to_ed25519_comm_monoid : BE.to_comm_monoid U64 20ul 0ul = {
BE.a_spec = a_spec;
BE.comm_monoid = S.mk_ed25519_comm_monoid;
BE.linv_ctx = linv_ctx;
BE.linv = F51.linv;
BE.refl = refl;
}
inline_for_extraction noextract
val point_add : BE.lmul_st U64 20ul 0ul mk_to_ed25519_comm_monoid
let point_add ctx x y xy =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_add_lemma
(F51.refl_ext_point (as_seq h0 x)) (F51.refl_ext_point (as_seq h0 y));
Hacl.Impl.Ed25519.PointAdd.point_add xy x y
inline_for_extraction noextract | {
"checked_file": "/",
"dependencies": [
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.Definitions.fst.checked",
"Hacl.Impl.Ed25519.PointDouble.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.PointAdd.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Ed25519.Group.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Hacl.Impl.Exponentiation.Definitions.lsqr_st Lib.IntTypes.U64
20ul
0ul
Hacl.Impl.Ed25519.Group.mk_to_ed25519_comm_monoid | Prims.Tot | [
"total"
] | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Ed25519.PointDouble.point_double",
"Prims.unit",
"Spec.Ed25519.Lemmas.to_aff_point_double_lemma",
"Hacl.Impl.Ed25519.Field51.refl_ext_point",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | false | false | false | false | false | let point_double ctx x xx =
| let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_double_lemma (F51.refl_ext_point (as_seq h0 x));
Hacl.Impl.Ed25519.PointDouble.point_double xx x | false |
CDDLExtractionTest.Bytes.fst | CDDLExtractionTest.Bytes.impl_mytype | val impl_mytype : CDDL.Pulse.impl_typ CDDL.Spec.bytes | let impl_mytype = impl_bytes () | {
"file_name": "share/steel/examples/pulse/dice/cbor/CDDLExtractionTest.Bytes.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 31,
"end_line": 25,
"start_col": 0,
"start_line": 25
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CDDLExtractionTest.Bytes
open Pulse.Lib.Pervasives
open CBOR.Spec
open CDDL.Spec
open CBOR.Pulse
open CDDL.Pulse | {
"checked_file": "/",
"dependencies": [
"Pulse.Lib.Pervasives.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"CDDL.Spec.fsti.checked",
"CDDL.Pulse.fst.checked",
"CBOR.Spec.fsti.checked",
"CBOR.Pulse.fst.checked"
],
"interface_file": false,
"source_file": "CDDLExtractionTest.Bytes.fst"
} | [
{
"abbrev": false,
"full_module": "CDDL.Pulse",
"short_module": null
},
{
"abbrev": false,
"full_module": "CBOR.Pulse",
"short_module": null
},
{
"abbrev": false,
"full_module": "CDDL.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "CBOR.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Lib.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "CDDLExtractionTest",
"short_module": null
},
{
"abbrev": false,
"full_module": "CDDLExtractionTest",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | CDDL.Pulse.impl_typ CDDL.Spec.bytes | Prims.Tot | [
"total"
] | [] | [
"CDDL.Pulse.impl_bytes"
] | [] | false | false | false | true | false | let impl_mytype =
| impl_bytes () | false |
|
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.sum_repr_type | val sum_repr_type (t: sum) : Tot eqtype | val sum_repr_type (t: sum) : Tot eqtype | let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 51,
"end_line": 48,
"start_col": 0,
"start_line": 47
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.sum -> Prims.eqtype | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | false | true | false | let sum_repr_type (t: sum) : Tot eqtype =
| match t with | Sum _ repr _ _ _ _ _ _ _ _ -> repr | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.sum_key_type | val sum_key_type (t: sum) : Tot eqtype | val sum_key_type (t: sum) : Tot eqtype | let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 49,
"end_line": 44,
"start_col": 0,
"start_line": 43
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.sum -> Prims.eqtype | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | false | true | false | let sum_key_type (t: sum) : Tot eqtype =
| match t with | Sum key _ _ _ _ _ _ _ _ _ -> key | false |
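(* Hedged usage sketch (illustrative names, not part of LowParse.Spec.Sum):
   `sum_key t` and `enum_repr (sum_enum t)` are refinements of the projected
   key and representation types, so the coercions below are plain identities,
   mirroring `sum_key_type_of_sum_key` defined later in this module. *)
let key_as_key_type (t: sum) (k: sum_key t) : Tot (sum_key_type t) = k
let repr_as_repr_type (t: sum) (r: enum_repr (sum_enum t)) : Tot (sum_repr_type t) = r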
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_case_recip' | val synth_case_recip'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: (k: enum_key e -> x: refine_with_tag tag_of_data k -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x)) | val synth_case_recip'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: (k: enum_key e -> x: refine_with_tag tag_of_data k -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x)) | let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 36,
"end_line": 16,
"start_col": 0,
"start_line": 6
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: LowParse.Spec.Enum.enum key repr ->
tag_of_data: (_: data -> LowParse.Spec.Enum.enum_key e) ->
type_of_tag: (_: LowParse.Spec.Enum.enum_key e -> Type) ->
synth_case_recip:
(k: LowParse.Spec.Enum.enum_key e -> x: LowParse.Spec.Base.refine_with_tag tag_of_data k
-> type_of_tag k) ->
x: data
-> Prims.GTot (type_of_tag (tag_of_data x)) | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag"
] | [] | false | false | false | false | false | let synth_case_recip'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: (k: enum_key e -> x: refine_with_tag tag_of_data k -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x)) =
| synth_case_recip (tag_of_data x) x | false |
Wasm11.fst | Wasm11.test2 | val test2: Prims.unit -> SteelT bool emp (fun _ -> emp) | val test2: Prims.unit -> SteelT bool emp (fun _ -> emp) | let test2 () : SteelT bool emp (fun _ -> emp) =
let r = malloc def_t 8sz in
ghost_split r 4sz;
let r1 = split_l r 4sz in
let r2 = split_r r 4sz in
change_equal_slprop (varray (split_l r 4sz)) (varray r1);
change_equal_slprop (varray (split_r r 4sz)) (varray r2);
let _ = mk 4s in
let b = ptrdiff r2 r1 in
ghost_join r1 r2 ();
change_equal_slprop
(varray (merge r1 r2))
(varray r);
// Free not supported in wasm
drop (varray r);
return (b = mk 4s) | {
"file_name": "share/steel/tests/krml/Wasm11.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 20,
"end_line": 49,
"start_col": 0,
"start_line": 34
} | module Wasm11
open FStar.SizeT
open FStar.PtrdiffT
open FStar.UInt64
open Steel.Effect.Atomic
open Steel.Effect
open Steel.Array
(* WASM tests for pointer subtraction *)
let test1 () : SteelT bool emp (fun _ -> emp) =
let r = malloc 0uL 8sz in
ghost_split r 4sz;
let r1 = split_l r 4sz in
let r2 = split_r r 4sz in
change_equal_slprop (varray (split_l r 4sz)) (varray r1);
change_equal_slprop (varray (split_r r 4sz)) (varray r2);
let _ = mk 4s in
let b = ptrdiff r2 r1 in
ghost_join r1 r2 ();
change_equal_slprop
(varray (merge r1 r2))
(varray r);
// Free not supported in Wasm
drop (varray r);
return (b = mk 4s)
type t = { foo: UInt32.t; bar: UInt16.t }
inline_for_extraction noextract
let def_t : t = { foo = 0ul; bar = 0us } | {
"checked_file": "/",
"dependencies": [
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"Steel.Array.fsti.checked",
"prims.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.SizeT.fsti.checked",
"FStar.PtrdiffT.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked"
],
"interface_file": false,
"source_file": "Wasm11.fst"
} | [
{
"abbrev": false,
"full_module": "Steel.Array",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PtrdiffT",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.SizeT",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> Steel.Effect.SteelT Prims.bool | Steel.Effect.SteelT | [] | [] | [
"Prims.unit",
"Steel.Effect.Atomic.return",
"Prims.bool",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"Steel.Effect.Common.vprop",
"Steel.Effect.Common.req",
"Steel.Effect.Common.rm",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit",
"Steel.Effect.Common.emp",
"Prims.op_Equality",
"FStar.PtrdiffT.t",
"FStar.PtrdiffT.mk",
"FStar.Int16.__int_to_t",
"Steel.Effect.Atomic.drop",
"Steel.Array.varray",
"Wasm11.t",
"Steel.Effect.Atomic.change_equal_slprop",
"Steel.ST.Array.merge",
"Steel.ST.Array.array",
"Steel.Array.ghost_join",
"Steel.FractionalPermission.full_perm",
"Steel.Array.ptrdiff",
"Steel.ST.Array.split_r",
"FStar.SizeT.__uint_to_t",
"Steel.ST.Array.split_l",
"FStar.SizeT.t",
"Steel.Array.ghost_split",
"Steel.Array.malloc",
"Wasm11.def_t"
] | [] | false | true | false | false | false | let test2 () : SteelT bool emp (fun _ -> emp) =
| let r = malloc def_t 8sz in
ghost_split r 4sz;
let r1 = split_l r 4sz in
let r2 = split_r r 4sz in
change_equal_slprop (varray (split_l r 4sz)) (varray r1);
change_equal_slprop (varray (split_r r 4sz)) (varray r2);
let _ = mk 4s in
let b = ptrdiff r2 r1 in
ghost_join r1 r2 ();
change_equal_slprop (varray (merge r1 r2)) (varray r);
drop (varray r);
return (b = mk 4s) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_cases' | val parse_sum_cases'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x)) | val parse_sum_cases'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x)) | let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 45,
"end_line": 131,
"start_col": 0,
"start_line": 124
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x))) ->
x: LowParse.Spec.Sum.sum_key s
-> LowParse.Spec.Base.parser (FStar.Pervasives.dfst (f x)) (LowParse.Spec.Sum.sum_cases s x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Combinators.parse_synth",
"Prims.__proj__Mkdtuple2__item___1",
"LowParse.Spec.Sum.sum_cases",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.unit",
"LowParse.Spec.Sum.synth_sum_case_injective",
"FStar.Pervasives.dfst"
] | [] | false | false | false | false | false | let parse_sum_cases'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x)) =
| synth_sum_case_injective s x;
(dsnd (f x)) `parse_synth` (synth_sum_case s x) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum | val parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t)) | val parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t)) | let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 39,
"end_line": 174,
"start_col": 0,
"start_line": 168
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.sum_repr_type t) ->
pc:
(x: LowParse.Spec.Sum.sum_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag t x)))
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.parse_sum_kind kt t pc)
(LowParse.Spec.Sum.sum_type t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Sum.parse_sum'",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Sum.parse_sum_cases",
"LowParse.Spec.Sum.parse_sum_kind",
"LowParse.Spec.Sum.sum_type"
] | [] | false | false | false | false | false | let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t)) =
| parse_sum' t p (parse_sum_cases t pc) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum' | val parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: (x: sum_key t -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t)) | val parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: (x: sum_key t -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t)) | let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 158,
"start_col": 0,
"start_line": 144
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.sum_repr_type t) ->
pc:
(x: LowParse.Spec.Sum.sum_key t
-> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_cases t x))
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind (LowParse.Spec.Combinators.parse_filter_kind
kt)
k)
(LowParse.Spec.Sum.sum_type t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Sum.sum_key",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Combinators.parse_tagged_union",
"LowParse.Spec.Combinators.parse_filter_kind",
"LowParse.Spec.Enum.parse_enum_key",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_enum",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Sum.sum_tag_of_data",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | false | false | false | false | false | let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: (x: sum_key t -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t)) =
| parse_tagged_union #(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_kind | val parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind | val parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind | let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 69,
"end_line": 166,
"start_col": 0,
"start_line": 161
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
kt: LowParse.Spec.Base.parser_kind ->
t: LowParse.Spec.Sum.sum ->
pc:
(x: LowParse.Spec.Sum.sum_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag t x)))
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_filter_kind",
"LowParse.Spec.Sum.weaken_parse_cases_kind"
] | [] | false | false | false | false | false | let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind =
| and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case_recip' | val synth_dsum_case_recip'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip:
(k: maybe_enum_key e -> (refine_with_tag tag_of_data k)
-> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) | val synth_dsum_case_recip'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip:
(k: maybe_enum_key e -> (refine_with_tag tag_of_data k)
-> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) | let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 33,
"end_line": 459,
"start_col": 0,
"start_line": 447
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: LowParse.Spec.Enum.enum key repr ->
tag_of_data: (_: data -> Prims.GTot (LowParse.Spec.Enum.maybe_enum_key e)) ->
type_of_known_tag: (_: LowParse.Spec.Enum.enum_key e -> Type) ->
type_of_unknown_tag: Type ->
synth_case_recip:
(k: LowParse.Spec.Enum.maybe_enum_key e -> _: LowParse.Spec.Base.refine_with_tag tag_of_data k
-> LowParse.Spec.Sum.dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k) ->
y: data
-> Prims.GTot
(Prims.dtuple2 (LowParse.Spec.Enum.maybe_enum_key e)
(fun x -> LowParse.Spec.Sum.dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)) | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"Prims.Mkdtuple2",
"Prims.dtuple2"
] | [] | false | false | false | false | false | let synth_dsum_case_recip'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip:
(k: maybe_enum_key e -> (refine_with_tag tag_of_data k)
-> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) =
| let tg = tag_of_data y in
(| tg, synth_case_recip tg y |) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.sum_tag_of_data | val sum_tag_of_data (t: sum) : Tot (x: sum_type t -> Tot (sum_key t)) | val sum_tag_of_data (t: sum) : Tot (x: sum_type t -> Tot (sum_key t)) | let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 52,
"end_line": 72,
"start_col": 0,
"start_line": 70
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.sum -> x: LowParse.Spec.Sum.sum_type t -> LowParse.Spec.Sum.sum_key t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Sum.sum_key"
] | [] | false | false | false | false | false | let sum_tag_of_data (t: sum) : Tot (x: sum_type t -> Tot (sum_key t)) =
| match t with | Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.dsum_repr_type | val dsum_repr_type (t: dsum) : Tot eqtype | val dsum_repr_type (t: dsum) : Tot eqtype | let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 54,
"end_line": 494,
"start_col": 0,
"start_line": 493
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
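(* The sum parser: parse the tag with [parse_enum_key], then dispatch to the
   per-case payload parser via [parse_tagged_union]. *)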
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
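(* Serialization mirrors parsing: [synth_sum_case_recip] inverts
   [synth_sum_case] on each case, so each case serializer can be transported
   through [serialize_synth]. *)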
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
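(* Smart constructors for [sum]: [make_sum'] takes the two round-trip
   properties as squashed proofs. *)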
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
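(* A [dsum] is a sum with a default case: known enum keys carry a per-key
   payload type, while all unknown representations carry
   [type_of_unknown_tag]. Projectors follow. *)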
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.dsum -> Prims.eqtype | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.squash",
"Prims.eq2"
] | [] | false | false | false | true | false | let dsum_repr_type (t: dsum) : Tot eqtype =
| match t with | DSum _ repr _ _ _ _ _ _ _ _ _ -> repr | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.dsum_key_type | val dsum_key_type (t: dsum) : Tot eqtype | val dsum_key_type (t: dsum) : Tot eqtype | let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 52,
"end_line": 490,
"start_col": 0,
"start_line": 489
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.dsum -> Prims.eqtype | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.squash",
"Prims.eq2"
] | [] | false | false | false | true | false | let dsum_key_type (t: dsum) : Tot eqtype =
| match t with | DSum key _ _ _ _ _ _ _ _ _ _ -> key | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.sum_enum | val sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) | val sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) | let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 45,
"end_line": 52,
"start_col": 0,
"start_line": 51
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.sum
-> LowParse.Spec.Enum.enum (LowParse.Spec.Sum.sum_key_type t) (LowParse.Spec.Sum.sum_repr_type t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_repr_type"
] | [] | false | false | false | false | false | let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
| match t with | Sum _ _ e _ _ _ _ _ _ _ -> e | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_cases | val parse_sum_cases
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x)) | val parse_sum_cases
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x)) | let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 86,
"end_line": 109,
"start_col": 0,
"start_line": 103
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x))) ->
x: LowParse.Spec.Sum.sum_key s
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.weaken_parse_cases_kind s f)
(LowParse.Spec.Sum.sum_cases s x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Base.weaken",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.unit",
"LowParse.Spec.Sum.synth_sum_case_injective"
] | [] | false | false | false | false | false | let parse_sum_cases
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x)) =
| synth_sum_case_injective s x;
(weaken (weaken_parse_cases_kind s f) (dsnd (f x))) `parse_synth` (synth_sum_case s x) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_sum_case | val synth_sum_case (s: sum) (k: sum_key s) (x: sum_type_of_tag s k) : Tot (sum_cases s k) | val synth_sum_case (s: sum) (k: sum_key s) (x: sum_type_of_tag s k) : Tot (sum_cases s k) | let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 50,
"end_line": 97,
"start_col": 0,
"start_line": 95
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
k: LowParse.Spec.Sum.sum_key s ->
x: LowParse.Spec.Sum.sum_type_of_tag s k
-> LowParse.Spec.Sum.sum_cases s k | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Sum.sum_key",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Sum.sum_cases"
] | [] | false | false | false | false | false | let synth_sum_case (s: sum) (k: sum_key s) (x: sum_type_of_tag s k) : Tot (sum_cases s k) =
| match s with | Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.weaken_parse_cases_kind | val weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
: Tot parser_kind | val weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
: Tot parser_kind | let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s)) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 35,
"end_line": 92,
"start_col": 0,
"start_line": 83
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x)))
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Base.glb_list_of",
"LowParse.Spec.Sum.sum_key_type",
"FStar.List.Tot.Base.mem",
"Prims.bool",
"LowParse.Spec.Base.default_parser_kind",
"FStar.List.Tot.Base.map",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Sum.sum_repr_type",
"FStar.Pervasives.Native.fst",
"LowParse.Spec.Sum.sum_enum",
"Prims.list"
] | [] | false | false | false | false | false | let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
: Tot parser_kind =
| let keys:list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s)
(fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then
let (| k , _ |) = f x in
k
else default_parser_kind)
(List.Tot.map fst (sum_enum s)) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.sum_key_type_of_sum_key | val sum_key_type_of_sum_key (t: sum) (k: sum_key t)
: Pure (sum_key_type t) (requires True) (ensures (fun k' -> k' == (k <: sum_key_type t))) | val sum_key_type_of_sum_key (t: sum) (k: sum_key t)
: Pure (sum_key_type t) (requires True) (ensures (fun k' -> k' == (k <: sum_key_type t))) | let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 62,
"start_col": 0,
"start_line": 59
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.sum -> k: LowParse.Spec.Sum.sum_key t
-> Prims.Pure (LowParse.Spec.Sum.sum_key_type t) | Prims.Pure | [] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"LowParse.Spec.Sum.sum_key_type",
"Prims.l_True",
"Prims.eq2"
] | [] | false | false | false | false | false | let sum_key_type_of_sum_key (t: sum) (k: sum_key t)
: Pure (sum_key_type t) (requires True) (ensures (fun k' -> k' == (k <: sum_key_type t))) =
| k | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_cases_eq' | val parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
(input: bytes)
: Lemma (parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input) | val parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
(input: bytes)
: Lemma (parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input) | let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 56,
"end_line": 142,
"start_col": 0,
"start_line": 133
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x))) ->
x: LowParse.Spec.Sum.sum_key s ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum_cases s f x) input ==
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum_cases' s f x) input) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_synth_eq",
"Prims.__proj__Mkdtuple2__item___1",
"LowParse.Spec.Sum.sum_cases",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.unit",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Base.weaken",
"LowParse.Spec.Sum.synth_sum_case_injective",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_sum_cases",
"LowParse.Spec.Sum.parse_sum_cases'",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
(input: bytes)
: Lemma (parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input) =
| synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.dsum_type_of_tag | val dsum_type_of_tag : t: LowParse.Spec.Sum.dsum -> k: LowParse.Spec.Enum.maybe_enum_key (LowParse.Spec.Sum.dsum_enum t)
-> Type | let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 89,
"end_line": 540,
"start_col": 0,
"start_line": 539
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.dsum -> k: LowParse.Spec.Enum.maybe_enum_key (LowParse.Spec.Sum.dsum_enum t)
-> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Enum.maybe_enum_key"
] | [] | false | false | false | false | true | let dsum_type_of_tag (t: dsum) =
| dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t) | false |
|
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.weaken_parse_dsum_cases_kind' | val weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k': parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind | val weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k': parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind | let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k' | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 37,
"end_line": 560,
"start_col": 0,
"start_line": 554
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k' | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
p: LowParse.Spec.Base.parser k' (LowParse.Spec.Sum.dsum_type_of_unknown_tag s)
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind"
] | [] | false | false | false | false | false | let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k': parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind =
| weaken_parse_dsum_cases_kind s f k' | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_cases_kind | val parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind | val parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind | let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 10,
"end_line": 634,
"start_col": 0,
"start_line": 625
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
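(* Editor's illustration (hypothetical, not part of LowParse): a minimal sketch of the
   tag_of_data / type_of_tag shape that make_sum' expects. In a real instantiation the
   tag function targets enum_key e for some enum e (and the synth_case functions plus
   their round-trip lemmas are supplied as in the arguments of make_sum' above); here a
   bare string stands in for the key type just to show the pattern. *)
type example_data =
  | ExampleBool of bool
  | ExampleNat of nat
let example_tag_of_data (d: example_data) : Tot string =
  match d with
  | ExampleBool _ -> "bool_case"
  | ExampleNat _ -> "nat_case"
let example_type_of_tag (k: string) : Tot Type0 =
  if k = "bool_case" then bool else nat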
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
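(* Editor's note: dsum_type_of_tag resolves the payload type per key — for a key
   [Known k] it is [dsum_type_of_known_tag t k], and for any [Unknown r] it is the
   single [dsum_type_of_unknown_tag t]. Both equations hold by unfolding
   dsum_type_of_tag' above; a hypothetical check could be phrased as
   [assert_norm (dsum_type_of_tag' e tk tu (Unknown r) == tu)]. *)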
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
x: LowParse.Spec.Sum.dsum_key s
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_enum",
"FStar.Pervasives.dfst",
"LowParse.Spec.Enum.maybe_enum_key"
] | [] | false | false | false | false | false | let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind =
| match x with
| Known k -> dfst (f k)
| _ -> k | false |
CQueue.Cell.fst | CQueue.Cell.ccell_not_null | val ccell_not_null
(#opened: _)
(#a: Type0)
(c: ccell_ptrvalue a)
: SteelGhost (squash (ccell_ptrvalue_is_null c == false)) opened
(ccell c)
(fun _ -> ccell c)
(fun _ -> True)
(fun h _ h' ->
h' (ccell c) == h (ccell c)
) | val ccell_not_null
(#opened: _)
(#a: Type0)
(c: ccell_ptrvalue a)
: SteelGhost (squash (ccell_ptrvalue_is_null c == false)) opened
(ccell c)
(fun _ -> ccell c)
(fun _ -> True)
(fun h _ h' ->
h' (ccell c) == h (ccell c)
) | let ccell_not_null
#opened #a c
=
let c1 = elim_ccell_ghost c in
let c2 : ccell_lvalue a = c in
change_equal_slprop (vptr (ccell_data c1)) (vptr (ccell_data c2));
change_equal_slprop (vptr (ccell_next c1)) (vptr (ccell_next c2));
intro_ccell c2;
change_equal_slprop (ccell c2) (ccell c);
() | {
"file_name": "share/steel/examples/steel/CQueue.Cell.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 4,
"end_line": 184,
"start_col": 0,
"start_line": 175
} | module CQueue.Cell
(* A Steel model of C cell structs *)
#push-options "--__no_positivity"
noeq
type mcell (a: Type0) = {
data: ref a;
next: ref (mcell a);
all_or_none_null: squash (is_null data == is_null next); // TODO: /\ freeable data /\ freeable next, if freeable is implemented as a pure space proposition rather than as stateful permissions (i.e. "freeable if you have the whole permission")
}
#pop-options
let ccell_ptrvalue a = mcell a
let ccell_ptrvalue_null a = {data = null; next = null; all_or_none_null = ()}
let ccell_ptrvalue_is_null #a x = is_null x.data
let ccell_data #a c =
c.data
let ccell_next #a c =
c.next
let ccell_is_lvalue_refine
(#a: Type)
(c: ccell_ptrvalue a)
(_: t_of emp)
: Tot prop
= ccell_ptrvalue_is_null c == false
let ccell_is_lvalue_rewrite
(#a: Type)
(c: ccell_ptrvalue a)
(_: normal (t_of (emp `vrefine` ccell_is_lvalue_refine c)))
: GTot (ccell_lvalue a)
= c
[@@ __steel_reduce__; __reduce__ ]
let ccell_is_lvalue0
(#a: Type)
(c: ccell_ptrvalue a)
: Tot vprop
= emp `vrefine` ccell_is_lvalue_refine c `vrewrite` ccell_is_lvalue_rewrite c
let ccell_is_lvalue_hp
(#a: Type)
(c: ccell_ptrvalue a)
: Tot (slprop u#1)
= hp_of (ccell_is_lvalue0 c)
let ccell_is_lvalue_sel
(#a: Type)
(c: ccell_ptrvalue a)
: GTot (selector (ccell_lvalue a) (ccell_is_lvalue_hp c))
= sel_of (ccell_is_lvalue0 c)
let intro_ccell_is_lvalue
#_ #a c
=
intro_vrefine emp (ccell_is_lvalue_refine c);
intro_vrewrite (emp `vrefine` ccell_is_lvalue_refine c) (ccell_is_lvalue_rewrite c);
change_slprop_rel
(ccell_is_lvalue0 c)
(ccell_is_lvalue c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell_is_lvalue c) == hp_of (ccell_is_lvalue0 c));
assert_norm (sel_of (ccell_is_lvalue c) m === sel_of (ccell_is_lvalue0 c) m)
)
let elim_ccell_is_lvalue
#_ #a c
=
change_slprop_rel
(ccell_is_lvalue c)
(ccell_is_lvalue0 c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell_is_lvalue c) == hp_of (ccell_is_lvalue0 c));
assert_norm (sel_of (ccell_is_lvalue c) m === sel_of (ccell_is_lvalue0 c) m)
);
elim_vrewrite (emp `vrefine` ccell_is_lvalue_refine c) (ccell_is_lvalue_rewrite c);
elim_vrefine emp (ccell_is_lvalue_refine c)
[@@ __steel_reduce__]
let ccell0 (a: Type0) (c: ccell_lvalue a) : Tot vprop =
(vptr (ccell_data c) `star` vptr (ccell_next c))
// unfold
let ccell_rewrite
(#a: Type0)
(c: ccell_ptrvalue a)
(x: dtuple2 (ccell_lvalue a) (vdep_payload (ccell_is_lvalue c) (ccell0 a)))
: GTot (vcell a)
= let p =
dsnd #(ccell_lvalue a) #(vdep_payload (ccell_is_lvalue c) (ccell0 a)) x
in
{
vcell_data = fst p;
vcell_next = snd p;
}
[@@ __steel_reduce__ ; __reduce__] // to avoid manual unfoldings through change_slprop
let ccell1
(#a: Type0)
(c: ccell_ptrvalue a)
: Tot vprop
= ccell_is_lvalue c `vdep` ccell0 a `vrewrite` ccell_rewrite c
let ccell_hp
#a c
= hp_of (ccell1 c)
let ccell_sel
#a c
= sel_of (ccell1 c)
let intro_ccell
#opened #a c
=
intro_ccell_is_lvalue c;
reveal_star (vptr (ccell_data c)) (vptr (ccell_next c));
intro_vdep
(ccell_is_lvalue c)
(vptr (ccell_data c) `star` vptr (ccell_next c))
(ccell0 a);
intro_vrewrite
(ccell_is_lvalue c `vdep` ccell0 a)
(ccell_rewrite c);
change_slprop_rel
(ccell1 c)
(ccell c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell1 c) == ccell_hp c);
assert_norm (sel_of (ccell1 c) m === sel_of (ccell c) m)
)
let elim_ccell_ghost
#opened #a c
=
change_slprop_rel
(ccell c)
(ccell1 c)
(fun x y -> x == y)
(fun m ->
assert_norm (hp_of (ccell1 c) == ccell_hp c);
assert_norm (sel_of (ccell1 c) m === sel_of (ccell c) m)
);
elim_vrewrite
(ccell_is_lvalue c `vdep` ccell0 a)
(ccell_rewrite c);
let c' : Ghost.erased (ccell_lvalue a) = elim_vdep
(ccell_is_lvalue c)
(ccell0 a)
in
elim_ccell_is_lvalue c;
change_equal_slprop
(ccell0 a c')
(vptr (ccell_data (Ghost.reveal c')) `star` vptr (ccell_next (Ghost.reveal c')));
reveal_star (vptr (ccell_data (Ghost.reveal c'))) (vptr (ccell_next (Ghost.reveal c')));
c'
let elim_ccell
#opened #a c
=
let c2 = elim_ccell_ghost c in
let c : ccell_lvalue a = c in
change_equal_slprop (vptr (ccell_data c2)) (vptr (ccell_data c));
change_equal_slprop (vptr (ccell_next c2)) (vptr (ccell_next c));
return c | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "CQueue.Cell.fst"
} | [
{
"abbrev": false,
"full_module": "Steel.Reference",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "CQueue",
"short_module": null
},
{
"abbrev": false,
"full_module": "CQueue",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | c: CQueue.Cell.ccell_ptrvalue a
-> Steel.Effect.Atomic.SteelGhost (Prims.squash (CQueue.Cell.ccell_ptrvalue_is_null c == false)) | Steel.Effect.Atomic.SteelGhost | [] | [] | [
"Steel.Memory.inames",
"CQueue.Cell.ccell_ptrvalue",
"Prims.squash",
"Prims.eq2",
"Prims.bool",
"CQueue.Cell.ccell_ptrvalue_is_null",
"Prims.unit",
"Steel.Effect.Atomic.change_equal_slprop",
"CQueue.Cell.ccell",
"CQueue.Cell.intro_ccell",
"Steel.Reference.vptr",
"CQueue.Cell.ccell_next",
"FStar.Ghost.reveal",
"CQueue.Cell.ccell_lvalue",
"CQueue.Cell.ccell_data",
"FStar.Ghost.erased",
"CQueue.Cell.elim_ccell_ghost"
] | [] | false | true | true | false | false | let ccell_not_null #opened #a c =
| let c1 = elim_ccell_ghost c in
let c2:ccell_lvalue a = c in
change_equal_slprop (vptr (ccell_data c1)) (vptr (ccell_data c2));
change_equal_slprop (vptr (ccell_next c1)) (vptr (ccell_next c2));
intro_ccell c2;
change_equal_slprop (ccell c2) (ccell c);
() | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_sum_cases' | val serialize_sum_cases'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(sr: (x: sum_key s -> Tot (serializer (dsnd (f x)))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x)) | val serialize_sum_cases'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(sr: (x: sum_key s -> Tot (serializer (dsnd (f x)))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x)) | let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 5,
"end_line": 280,
"start_col": 0,
"start_line": 266
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x))) ->
sr:
(x: LowParse.Spec.Sum.sum_key s -> LowParse.Spec.Base.serializer (FStar.Pervasives.dsnd (f x))
) ->
x: LowParse.Spec.Sum.sum_key s
-> LowParse.Spec.Base.serializer (LowParse.Spec.Sum.parse_sum_cases' s f x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Base.serializer",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Combinators.serialize_synth",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Sum.synth_sum_case",
"LowParse.Spec.Sum.synth_sum_case_recip",
"Prims.unit",
"LowParse.Spec.Sum.synth_sum_case_inverse",
"LowParse.Spec.Sum.synth_sum_case_injective",
"FStar.Pervasives.dfst",
"LowParse.Spec.Sum.parse_sum_cases'"
] | [] | false | false | false | false | false | let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(sr: (x: sum_key s -> Tot (serializer (dsnd (f x)))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x)) =
| synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth _ (synth_sum_case s x) (sr x) (synth_sum_case_recip s x) ()) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_sum_case_inverse | val synth_sum_case_inverse (s: sum) (k: sum_key s)
: Lemma (synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k)) | val synth_sum_case_inverse (s: sum) (k: sum_key s)
: Lemma (synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k)) | let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 61,
"end_line": 264,
"start_col": 0,
"start_line": 262
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: LowParse.Spec.Sum.sum -> k: LowParse.Spec.Sum.sum_key s
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Combinators.synth_inverse (LowParse.Spec.Sum.synth_sum_case s k)
(LowParse.Spec.Sum.synth_sum_case_recip s k)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"FStar.Classical.forall_intro",
"LowParse.Spec.Sum.__proj__Sum__item__data",
"Prims.eq2",
"LowParse.Spec.Sum.__proj__Sum__item__synth_case",
"LowParse.Spec.Sum.__proj__Sum__item__tag_of_data",
"LowParse.Spec.Sum.synth_case_recip'",
"LowParse.Spec.Sum.__proj__Sum__item__key",
"LowParse.Spec.Sum.__proj__Sum__item__repr",
"LowParse.Spec.Sum.__proj__Sum__item__e",
"LowParse.Spec.Sum.__proj__Sum__item__type_of_tag",
"LowParse.Spec.Sum.__proj__Sum__item__synth_case_recip",
"LowParse.Spec.Sum.__proj__Sum__item__synth_case_synth_case_recip",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Sum.synth_sum_case",
"LowParse.Spec.Sum.synth_sum_case_recip",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let synth_sum_case_inverse (s: sum) (k: sum_key s)
: Lemma (synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k)) =
| Classical.forall_intro (Sum?.synth_case_synth_case_recip s) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_cases_eq | val parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input ==
(match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed))) | val parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input ==
(match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed))) | let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 95,
"end_line": 122,
"start_col": 0,
"start_line": 111
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x))) ->
x: LowParse.Spec.Sum.sum_key s ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum_cases s f x) input ==
(match LowParse.Spec.Base.parse (FStar.Pervasives.dsnd (f x)) input with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ y consumed) ->
FStar.Pervasives.Native.Some (LowParse.Spec.Sum.synth_sum_case s x y, consumed))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_synth_eq",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Base.weaken",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.unit",
"LowParse.Spec.Sum.synth_sum_case_injective",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_sum_cases",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input ==
(match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed))) =
| synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input | false |
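A minimal illustrative sketch, not machine-checked, of how a rewrite lemma like parse_sum_cases_eq above is typically consumed: once the payload parser fails, the whole case parser fails. The name parse_sum_cases_eq_none is hypothetical, and the sketch assumes the LowParse.Spec.Sum definitions quoted in this entry are in scope.

let parse_sum_cases_eq_none
  (s: sum)
  (f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
  (x: sum_key s)
  (input: bytes)
: Lemma
  (requires (parse (dsnd (f x)) input == None))
  (ensures (parse (parse_sum_cases s f x) input == None))
= (* hypothetical helper: the equality given by parse_sum_cases_eq reduces the goal
     to the None branch of its match once the payload parse is known to fail *)
  parse_sum_cases_eq s f x input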
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_sum_case_injective | val synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma (synth_injective (synth_sum_case s k)) | val synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma (synth_injective (synth_sum_case s k)) | let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 63,
"end_line": 101,
"start_col": 0,
"start_line": 99
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: LowParse.Spec.Sum.sum -> k: LowParse.Spec.Sum.sum_key s
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.Combinators.synth_injective (LowParse.Spec.Sum.synth_sum_case s k)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"FStar.Classical.forall_intro",
"LowParse.Spec.Sum.__proj__Sum__item__type_of_tag",
"Prims.eq2",
"LowParse.Spec.Sum.__proj__Sum__item__tag_of_data",
"LowParse.Spec.Sum.__proj__Sum__item__synth_case",
"LowParse.Spec.Sum.synth_case_recip'",
"LowParse.Spec.Sum.__proj__Sum__item__key",
"LowParse.Spec.Sum.__proj__Sum__item__repr",
"LowParse.Spec.Sum.__proj__Sum__item__e",
"LowParse.Spec.Sum.__proj__Sum__item__data",
"LowParse.Spec.Sum.__proj__Sum__item__synth_case_recip",
"LowParse.Spec.Sum.__proj__Sum__item__synth_case_recip_synth_case",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"LowParse.Spec.Combinators.synth_injective",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma (synth_injective (synth_sum_case s k)) =
| Classical.forall_intro (Sum?.synth_case_recip_synth_case s k) | false |
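A minimal illustrative sketch, not machine-checked, of why synth_sum_case_injective matters: it discharges the synth_injective precondition of parse_synth, which is exactly how the parse_sum_cases' definition quoted in this entry uses it. The name parse_sum_case_alt is hypothetical and merely generalizes that pattern to an arbitrary payload parser.

let parse_sum_case_alt
  (s: sum)
  (x: sum_key s)
  (#k: parser_kind)
  (p: parser k (sum_type_of_tag s x))
: Tot (parser k (sum_cases s x))
= (* parse_synth demands synth_injective (synth_sum_case s x);
     the lemma proved in this entry provides that fact *)
  synth_sum_case_injective s x;
  p `parse_synth` synth_sum_case s x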
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.dsum_enum | val dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) | val dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) | let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 48,
"end_line": 498,
"start_col": 0,
"start_line": 497
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.dsum
-> LowParse.Spec.Enum.enum (LowParse.Spec.Sum.dsum_key_type t)
(LowParse.Spec.Sum.dsum_repr_type t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type"
] | [] | false | false | false | false | false | let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
| match t with | DSum _ _ e _ _ _ _ _ _ _ _ -> e | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_sum_cases | val serialize_sum_cases
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(sr: (x: sum_key s -> Tot (serializer (dsnd (f x)))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x)) | val serialize_sum_cases
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(sr: (x: sum_key s -> Tot (serializer (dsnd (f x)))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x)) | let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 27,
"end_line": 292,
"start_col": 0,
"start_line": 282
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.sum ->
f:
(x: LowParse.Spec.Sum.sum_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag s x))) ->
sr:
(x: LowParse.Spec.Sum.sum_key s -> LowParse.Spec.Base.serializer (FStar.Pervasives.dsnd (f x))
) ->
x: LowParse.Spec.Sum.sum_key s
-> LowParse.Spec.Base.serializer (LowParse.Spec.Sum.parse_sum_cases s f x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Spec.Base.serializer",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Base.serialize_ext",
"FStar.Pervasives.dfst",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Sum.parse_sum_cases'",
"LowParse.Spec.Sum.serialize_sum_cases'",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Sum.parse_sum_cases",
"Prims.unit",
"FStar.Classical.forall_intro",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_sum_cases_eq'"
] | [] | false | false | false | false | false | let serialize_sum_cases
(s: sum)
(f: (x: sum_key s -> Tot (k: parser_kind & parser k (sum_type_of_tag s x))))
(sr: (x: sum_key s -> Tot (serializer (dsnd (f x)))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x)) =
| Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext (parse_sum_cases' s f x) (serialize_sum_cases' s f sr x) (parse_sum_cases s f x) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum' | val parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: (x: dsum_key t -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t)) | val parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: (x: dsum_key t -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t)) | let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 691,
"start_col": 0,
"start_line": 677
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
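(* parse_dsum_type_of_tag dispatches on the tag: a Known tag is parsed with
   the tag-specific parser obtained from f, an Unknown tag with the default
   parser g; both branches are weakened to the common kind computed by
   weaken_parse_dsum_cases_kind. *)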
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
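(* parse_dsum_cases_kind gives the precise, non-weakened kind of the case
   parser for tag x: the declared kind of the known-case parser, or k for the
   unknown case. *)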
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
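(* parse_dsum_cases_eq' shows that the weakened parser parse_dsum_cases and
   the precisely-kinded parse_dsum_cases' produce the same result on every
   input, by unfolding parse_synth on both sides. *)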
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.dsum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.dsum_repr_type t) ->
pc:
(x: LowParse.Spec.Sum.dsum_key t
-> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_cases t x))
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind kt k)
(LowParse.Spec.Sum.dsum_type t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Sum.dsum_cases",
"LowParse.Spec.Combinators.parse_tagged_union",
"LowParse.Spec.Enum.parse_maybe_enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | false | false | false | false | false | let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: (x: dsum_key t -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t)) =
| parse_tagged_union #kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_kind | val parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(k: parser_kind)
: Tot parser_kind | val parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(k: parser_kind)
: Tot parser_kind | let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 55,
"end_line": 700,
"start_col": 0,
"start_line": 694
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
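(* parse_sum_cases parses the payload of case x with the parser supplied by
   f, weakened to the common kind, and rebuilds the tagged value with
   synth_sum_case; synth_sum_case_injective justifies the use of parse_synth. *)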
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
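(* parse_sum t p pc first parses the enum key with p (unknown representations
   are rejected by the filter underlying parse_enum_key) and then runs the
   case parser selected by that key.  As a sketch only (the names my_sum,
   parse_u8 and my_cases are illustrative and not defined in this module),
   an instantiation would look like:
     let parse_my_sum = parse_sum my_sum parse_u8 my_cases
*)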
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
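(* The parse_sum_eq lemmas below unfold parse_sum on a concrete input: the
   enum key is parsed first, the remaining bytes are fed to the parser of the
   corresponding case, and the two consumed counts are added. *)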
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
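(* make_sum packages an enum, a tag projection, the per-case payload types,
   the two synthesis functions and their round-trip proofs into a sum
   descriptor; make_sum' below takes the same proofs in squash form. *)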
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
kt: LowParse.Spec.Base.parser_kind ->
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
k: LowParse.Spec.Base.parser_kind
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind"
] | [] | false | false | false | false | false | let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(k: parser_kind)
: Tot parser_kind =
| and_then_kind kt (weaken_parse_dsum_cases_kind s f k) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_sum_eq | val serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: (x: sum_key t -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma (requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures
(serialize (serialize_sum t s sc) x ==
(let tg = sum_tag_of_data t x in
(serialize (serialize_enum_key _ s (sum_enum t)) tg)
`Seq.append`
(serialize (sc tg) (synth_sum_case_recip t tg x))))) | val serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: (x: sum_key t -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma (requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures
(serialize (serialize_sum t s sc) x ==
(let tg = sum_tag_of_data t x in
(serialize (serialize_enum_key _ s (sum_enum t)) tg)
`Seq.append`
(serialize (sc tg) (synth_sum_case_recip t tg x))))) | let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 99,
"end_line": 348,
"start_col": 0,
"start_line": 329
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
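(* serialize_sum_cases transports the serializer of the precisely-kinded
   parser parse_sum_cases' to the weakened parser parse_sum_cases, using
   parse_sum_cases_eq' to show that the two parsers agree on every input. *)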
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
s: LowParse.Spec.Base.serializer p ->
sc:
(x: LowParse.Spec.Sum.sum_key t
-> LowParse.Spec.Base.serializer (FStar.Pervasives.dsnd (pc x))) ->
x: LowParse.Spec.Sum.sum_type t
-> FStar.Pervasives.Lemma
(requires
Mkparser_kind'?.parser_kind_subkind kt ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong)
(ensures
LowParse.Spec.Base.serialize (LowParse.Spec.Sum.serialize_sum t s sc) x ==
(let tg = LowParse.Spec.Sum.sum_tag_of_data t x in
FStar.Seq.Base.append (LowParse.Spec.Base.serialize (LowParse.Spec.Enum.serialize_enum_key
p
s
(LowParse.Spec.Sum.sum_enum t))
tg)
(LowParse.Spec.Base.serialize (sc tg) (LowParse.Spec.Sum.synth_sum_case_recip t tg x)))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.sum_type_of_tag",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Combinators.serialize_synth_eq",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Sum.synth_sum_case",
"LowParse.Spec.Sum.synth_sum_case_recip",
"Prims.unit",
"LowParse.Spec.Sum.synth_sum_case_inverse",
"LowParse.Spec.Sum.synth_sum_case_injective",
"LowParse.Spec.Sum.sum_tag_of_data",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.squash",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Sum.parse_sum_kind",
"LowParse.Spec.Sum.parse_sum",
"LowParse.Spec.Sum.serialize_sum",
"FStar.Seq.Base.append",
"LowParse.Spec.Combinators.parse_filter_kind",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_enum",
"LowParse.Spec.Enum.parse_enum_key",
"LowParse.Spec.Enum.serialize_enum_key",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: (x: sum_key t -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma (requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures
(serialize (serialize_sum t s sc) x ==
(let tg = sum_tag_of_data t x in
(serialize (serialize_enum_key _ s (sum_enum t)) tg)
`Seq.append`
(serialize (sc tg) (synth_sum_case_recip t tg x))))) =
| let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x | false |
Vale.AES.GCM.fsti | Vale.AES.GCM.set_to_one_LE | val set_to_one_LE (q: quad32) : quad32 | val set_to_one_LE (q: quad32) : quad32 | let set_to_one_LE (q:quad32) : quad32 = four_insert q 1 0 | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCM.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 57,
"end_line": 20,
"start_col": 0,
"start_line": 20
} | module Vale.AES.GCM
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.AES.GCM_s
open Vale.AES.AES_s
open Vale.AES.GCM_helpers
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GHash_s
open FStar.Mul
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Calc
open Vale.Def.Words.Four_s | {
"checked_file": "/",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Calc",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | q: Vale.Def.Types_s.quad32 -> Vale.Def.Types_s.quad32 | Prims.Tot | [
"total"
] | [] | [
"Vale.Def.Types_s.quad32",
"Vale.Def.Words.Four_s.four_insert",
"Vale.Def.Types_s.nat32"
] | [] | false | false | false | true | false | let set_to_one_LE (q: quad32) : quad32 =
| four_insert q 1 0 | false |
Wasm11.fst | Wasm11.test1 | val test1: Prims.unit -> SteelT bool emp (fun _ -> emp) | val test1: Prims.unit -> SteelT bool emp (fun _ -> emp) | let test1 () : SteelT bool emp (fun _ -> emp) =
let r = malloc 0uL 8sz in
ghost_split r 4sz;
let r1 = split_l r 4sz in
let r2 = split_r r 4sz in
change_equal_slprop (varray (split_l r 4sz)) (varray r1);
change_equal_slprop (varray (split_r r 4sz)) (varray r2);
let _ = mk 4s in
let b = ptrdiff r2 r1 in
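  // b is the pointer difference between the two halves of the same
  // 8-element allocation; the final return compares it against mk 4s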
ghost_join r1 r2 ();
change_equal_slprop
(varray (merge r1 r2))
(varray r);
// Free not supported in Wasm
drop (varray r);
return (b = mk 4s) | {
"file_name": "share/steel/tests/krml/Wasm11.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 20,
"end_line": 27,
"start_col": 0,
"start_line": 12
} | module Wasm11
open FStar.SizeT
open FStar.PtrdiffT
open FStar.UInt64
open Steel.Effect.Atomic
open Steel.Effect
open Steel.Array
(* WASM tests for pointer subtraction *) | {
"checked_file": "/",
"dependencies": [
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"Steel.Array.fsti.checked",
"prims.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.SizeT.fsti.checked",
"FStar.PtrdiffT.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked"
],
"interface_file": false,
"source_file": "Wasm11.fst"
} | [
{
"abbrev": false,
"full_module": "Steel.Array",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.UInt64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PtrdiffT",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.SizeT",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> Steel.Effect.SteelT Prims.bool | Steel.Effect.SteelT | [] | [] | [
"Prims.unit",
"Steel.Effect.Atomic.return",
"Prims.bool",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"Steel.Effect.Common.vprop",
"Steel.Effect.Common.req",
"Steel.Effect.Common.rm",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit",
"Steel.Effect.Common.emp",
"Prims.op_Equality",
"FStar.PtrdiffT.t",
"FStar.PtrdiffT.mk",
"FStar.Int16.__int_to_t",
"Steel.Effect.Atomic.drop",
"Steel.Array.varray",
"FStar.UInt64.t",
"Steel.Effect.Atomic.change_equal_slprop",
"Steel.ST.Array.merge",
"Steel.ST.Array.array",
"Steel.Array.ghost_join",
"Steel.FractionalPermission.full_perm",
"Steel.Array.ptrdiff",
"Steel.ST.Array.split_r",
"FStar.SizeT.__uint_to_t",
"Steel.ST.Array.split_l",
"FStar.SizeT.t",
"Steel.Array.ghost_split",
"Steel.Array.malloc",
"FStar.UInt64.__uint_to_t"
] | [] | false | true | false | false | false | let test1 () : SteelT bool emp (fun _ -> emp) =
| let r = malloc 0uL 8sz in
ghost_split r 4sz;
let r1 = split_l r 4sz in
let r2 = split_r r 4sz in
change_equal_slprop (varray (split_l r 4sz)) (varray r1);
change_equal_slprop (varray (split_r r 4sz)) (varray r2);
let _ = mk 4s in
let b = ptrdiff r2 r1 in
ghost_join r1 r2 ();
change_equal_slprop (varray (merge r1 r2)) (varray r);
drop (varray r);
return (b = mk 4s) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum | val parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t)) | val parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t)) | let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 42,
"end_line": 710,
"start_col": 0,
"start_line": 702
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.dsum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.dsum_repr_type t) ->
f:
(x: LowParse.Spec.Sum.dsum_known_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag t x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag t)
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.parse_dsum_kind kt t f k)
(LowParse.Spec.Sum.dsum_type t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.parse_dsum'",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind",
"LowParse.Spec.Sum.parse_dsum_cases",
"LowParse.Spec.Sum.parse_dsum_kind",
"LowParse.Spec.Sum.dsum_type"
] | [] | false | false | false | false | false | let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t)) =
| parse_dsum' t p (parse_dsum_cases t f g) | false |
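Unfolding the completed definition above together with parse_dsum' and parse_dsum_cases from the file context shown, parse_dsum is a tagged-union parser: it first parses a maybe_enum_key tag, then dispatches known tags to f and the unknown tag to g. A comment-form sketch of that unfolding, using only definitions visible above:

(* parse_dsum t p f g
   == parse_dsum' t p (parse_dsum_cases t f g)                     (completed definition above)
   == parse_tagged_union (parse_maybe_enum_key p (dsum_enum t))
                         (dsum_tag_of_data t)
                         (parse_dsum_cases t f g)                  (definition of parse_dsum') *)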
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_eq' | val parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_sum_cases' t pc k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x))) | val parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_sum_cases' t pc k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x))) | let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 9,
"end_line": 207,
"start_col": 0,
"start_line": 176
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.sum_repr_type t) ->
pc:
(x: LowParse.Spec.Sum.sum_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag t x))) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum t p pc) input ==
(match
LowParse.Spec.Base.parse (LowParse.Spec.Enum.parse_enum_key p
(LowParse.Spec.Sum.sum_enum t))
input
with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ k consumed_k) ->
let input_k = FStar.Seq.Base.slice input consumed_k (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum_cases' t pc k) input_k with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some (x, consumed_k + consumed_x))
<:
FStar.Pervasives.Native.option (LowParse.Spec.Sum.sum_type t *
LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_tagged_union_eq_gen",
"LowParse.Spec.Combinators.parse_filter_kind",
"LowParse.Spec.Enum.parse_enum_key",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_enum",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Sum.sum_tag_of_data",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Sum.parse_sum_cases",
"Prims.unit",
"FStar.Pervasives.dfst",
"LowParse.Spec.Sum.parse_sum_cases'",
"LowParse.Spec.Sum.parse_sum_cases_eq'",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_sum",
"LowParse.Spec.Enum.enum_key",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Sum.sum_cases",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_sum_cases' t pc k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x))) =
| parse_tagged_union_eq_gen #(parse_filter_kind kt) #(sum_key t) (parse_enum_key p (sum_enum t))
#(sum_type t) (sum_tag_of_data t) (parse_sum_cases t pc) (parse_enum_key p (sum_enum t))
(fun input -> ()) (fun k -> dfst (pc k)) (parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input) input | false |
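The lemma above characterizes parse_sum at the parse_sum_cases' level; parse_sum_eq, proved from it later in the same file, restates the case parser through dsnd (pc k) and synth_sum_case. The underlying shape of parse_sum follows by the same kind of definitional unfolding, grounded in the definitions of parse_sum and parse_sum' shown above:

(* parse_sum t p pc
   == parse_sum' t p (parse_sum_cases t pc)
   == parse_tagged_union (parse_enum_key p (sum_enum t))
                         (sum_tag_of_data t)
                         (parse_sum_cases t pc) *)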
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_eq'' | val parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
match k with
| Known k ->
(match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) ->
Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x))
| _ -> None)) | val parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
match k with
| Known k ->
(match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) ->
Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x))
| _ -> None)) | let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 40,
"end_line": 255,
"start_col": 0,
"start_line": 233
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.sum_repr_type t) ->
pc:
(x: LowParse.Spec.Sum.sum_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag t x))) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum t p pc) input ==
(match LowParse.Spec.Base.parse p input with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ k' consumed_k) ->
let input_k = FStar.Seq.Base.slice input consumed_k (FStar.Seq.Base.length input) in
let k = LowParse.Spec.Enum.maybe_enum_key_of_repr (LowParse.Spec.Sum.sum_enum t) k' in
(match k with
| LowParse.Spec.Enum.Known #_ #_ #_ k ->
(match LowParse.Spec.Base.parse (FStar.Pervasives.dsnd (pc k)) input_k with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some
#_
(FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some
(LowParse.Spec.Sum.synth_sum_case t k x,
consumed_k + consumed_x))
<:
FStar.Pervasives.Native.option (LowParse.Spec.Sum.sum_type t *
LowParse.Spec.Base.consumed_length input)
| _ -> FStar.Pervasives.Native.None)
<:
FStar.Pervasives.Native.option (LowParse.Spec.Sum.sum_type t *
LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Enum.parse_enum_key_eq",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_enum",
"Prims.unit",
"LowParse.Spec.Sum.parse_sum_eq",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_sum",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Enum.enum_key",
"FStar.Pervasives.dsnd",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.op_Addition",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.maybe_enum_key_of_repr",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
match k with
| Known k ->
(match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) ->
Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x))
| _ -> None)) =
| parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_case_recip_synth_case_post | val synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: (x: enum_key e -> y: type_of_tag x -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: (k: enum_key e -> x: refine_with_tag tag_of_data k -> Tot (type_of_tag k)))
(x: key)
: GTot Type0 | val synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: (x: enum_key e -> y: type_of_tag x -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: (k: enum_key e -> x: refine_with_tag tag_of_data k -> Tot (type_of_tag k)))
(x: key)
: GTot Type0 | let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 388,
"start_col": 0,
"start_line": 374
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: LowParse.Spec.Enum.enum key repr ->
tag_of_data: (_: data -> LowParse.Spec.Enum.enum_key e) ->
type_of_tag: (_: LowParse.Spec.Enum.enum_key e -> Type) ->
synth_case:
(x: LowParse.Spec.Enum.enum_key e -> y: type_of_tag x
-> LowParse.Spec.Base.refine_with_tag tag_of_data x) ->
synth_case_recip:
(k: LowParse.Spec.Enum.enum_key e -> x: LowParse.Spec.Base.refine_with_tag tag_of_data k
-> type_of_tag k) ->
x: key
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.l_imp",
"Prims.b2t",
"LowParse.Spec.Enum.list_mem",
"LowParse.Spec.Enum.list_map",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.fst",
"Prims.l_Forall",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'"
] | [] | false | false | false | false | true | let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: (x: enum_key e -> y: type_of_tag x -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: (k: enum_key e -> x: refine_with_tag tag_of_data k -> Tot (type_of_tag k)))
(x: key)
: GTot Type0 =
| list_mem x (list_map fst e) ==>
(forall (y: type_of_tag x).
{:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y) | false |
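The record above captures synth_case_recip_synth_case_post, the proof obligation that make_sum' (visible in the file_context) asks its caller to discharge: for every key actually listed in the enum, projecting the payload out of a freshly synthesized case returns the original payload. As a purely illustrative aside — not part of the dataset and not LowParse code; every name below (tag, msg, synth_ping, recip_ping, ...) is invented — the same round-trip law can be written down and spot-checked on a plain OCaml sum type:

(* Toy analogue of the synth_case / synth_case_recip pair:
   a tagged message type, a per-tag payload, and the round-trip
   law "recip (synth tag payload) = payload". *)

type tag = Ping | Text

type msg =
  | MPing of int        (* payload of Ping: a sequence number *)
  | MText of string     (* payload of Text: a message body *)

(* synth_case: build a msg from a tag and its payload.  The payload type
   depends on the tag, which OCaml cannot express directly, so the sketch
   uses one function per tag. *)
let synth_ping (seq : int) : msg = MPing seq
let synth_text (body : string) : msg = MText body

(* synth_case_recip: project the payload back out of a msg that is known
   to carry the given tag. *)
let recip_ping (m : msg) : int =
  match m with MPing seq -> seq | _ -> invalid_arg "wrong tag"
let recip_text (m : msg) : string =
  match m with MText body -> body | _ -> invalid_arg "wrong tag"

(* The law synth_case_recip_synth_case_post asks for, checked on samples. *)
let () =
  assert (recip_ping (synth_ping 42) = 42);
  assert (recip_text (synth_text "hi") = "hi")

In the F* version the law is stated once, parametrically in the key, because the payload type type_of_tag x depends on it; the OCaml sketch has to split it into one pair of functions per case.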
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case' | val synth_dsum_case'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case:
(x: enum_key e -> y: type_of_known_tag x -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case:
(x: unknown_enum_repr e -> type_of_unknown_tag
-> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data | val synth_dsum_case'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case:
(x: enum_key e -> y: type_of_known_tag x -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case:
(x: unknown_enum_repr e -> type_of_unknown_tag
-> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data | let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 35,
"end_line": 445,
"start_col": 0,
"start_line": 430
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: LowParse.Spec.Enum.enum key repr ->
tag_of_data: (_: data -> Prims.GTot (LowParse.Spec.Enum.maybe_enum_key e)) ->
type_of_known_tag: (_: LowParse.Spec.Enum.enum_key e -> Type) ->
type_of_unknown_tag: Type ->
synth_known_case:
(x: LowParse.Spec.Enum.enum_key e -> y: type_of_known_tag x
-> LowParse.Spec.Base.refine_with_tag tag_of_data (LowParse.Spec.Enum.Known x)) ->
synth_unknown_case:
(x: LowParse.Spec.Enum.unknown_enum_repr e -> _: type_of_unknown_tag
-> LowParse.Spec.Base.refine_with_tag tag_of_data (LowParse.Spec.Enum.Unknown x)) ->
xy:
Prims.dtuple2 (LowParse.Spec.Enum.maybe_enum_key e)
(fun x -> LowParse.Spec.Sum.dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
-> Prims.GTot data | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Enum.Known",
"LowParse.Spec.Enum.unknown_enum_repr",
"LowParse.Spec.Enum.Unknown",
"Prims.dtuple2",
"LowParse.Spec.Sum.dsum_type_of_tag'"
] | [] | false | false | false | false | false | let synth_dsum_case'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case:
(x: enum_key e -> y: type_of_known_tag x -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case:
(x: unknown_enum_repr e -> type_of_unknown_tag
-> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data =
| let (| x , y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y | false |
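synth_dsum_case' above rebuilds a data value from a dependent pair (| tag, payload |), dispatching on whether the tag is a Known enum key or an Unknown raw representation; dsum_type_of_tag' earlier in the file_context picks the payload type accordingly. A rough OCaml picture of that dispatch — invented names only, not the LowParse API — looks like this:

(* Toy analogue of synth_dsum_case': a tag is either a key from the enum
   (Known) or a raw wire value with no matching key (Unknown), and the
   dispatch rebuilds the data value from the tag plus its payload. *)

type maybe_tag =
  | Known of string      (* a key listed in the enum, e.g. "ping" *)
  | Unknown of int       (* a wire value with no matching key *)

type payload =
  | PKnown of int        (* payload shape for the known "ping" case *)
  | PUnknown of bytes    (* raw payload kept for unknown tags *)

type packet =
  | Ping of int
  | Raw of int * bytes   (* default case: unknown tag plus raw payload *)

(* In F* the payload type depends on the tag, so the mismatch branch below
   is ruled out statically; OCaml needs a runtime guard instead. *)
let synth_dsum_case (t : maybe_tag) (p : payload) : packet =
  match t, p with
  | Known "ping", PKnown n   -> Ping n
  | Unknown r,    PUnknown b -> Raw (r, b)
  | _ -> invalid_arg "tag/payload mismatch"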
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_sum_case_recip | val synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) | val synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) | let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 22,
"end_line": 260,
"start_col": 0,
"start_line": 258
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: LowParse.Spec.Sum.sum -> k: LowParse.Spec.Sum.sum_key s -> x: LowParse.Spec.Sum.sum_cases s k
-> LowParse.Spec.Sum.sum_type_of_tag s k | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Sum.sum_key",
"LowParse.Spec.Sum.sum_cases",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Base.refine_with_tag",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.synth_case_recip'",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Sum.sum_type_of_tag"
] | [] | false | false | false | false | false | let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
| match s with | Sum _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip k x | false |
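synth_sum_case_recip is the projection the serializer side needs: given a value whose tag is already known, it recovers the per-case payload, and synth_sum_case_inverse (in the file_context) states that re-synthesizing from that payload gives the value back. A self-contained OCaml sketch of that inverse law, with invented names and a runtime check standing in for the F* lemma:

(* Toy analogue of synth_sum_case / synth_sum_case_recip being mutual
   inverses: rebuilding a value from its tag and projected payload gives
   the value back (the synth_case_synth_case_recip direction). *)

type msg = MPing of int | MText of string

type tag = TPing | TText
type payload = PInt of int | PStr of string

let tag_of_msg = function MPing _ -> TPing | MText _ -> TText

(* the projection: recover the per-case payload *)
let recip : msg -> payload = function
  | MPing n -> PInt n
  | MText s -> PStr s

(* the synthesis: rebuild the value from tag and payload *)
let synth (t : tag) (p : payload) : msg =
  match t, p with
  | TPing, PInt n -> MPing n
  | TText, PStr s -> MText s
  | _ -> invalid_arg "mismatch (statically impossible in the F* version)"

let () =
  let samples = [ MPing 7; MText "ok" ] in
  List.iter (fun m -> assert (synth (tag_of_msg m) (recip m) = m)) samples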
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_sum | val serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: (x: sum_key t -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | val serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: (x: sum_key t -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 77,
"end_line": 327,
"start_col": 0,
"start_line": 316
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
s: LowParse.Spec.Base.serializer p ->
sc:
(x: LowParse.Spec.Sum.sum_key t
-> LowParse.Spec.Base.serializer (FStar.Pervasives.dsnd (pc x)))
-> Prims.Pure (LowParse.Spec.Base.serializer (LowParse.Spec.Sum.parse_sum t p pc)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.sum_type_of_tag",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.serialize_sum'",
"LowParse.Spec.Sum.weaken_parse_cases_kind",
"LowParse.Spec.Sum.parse_sum_cases",
"LowParse.Spec.Sum.serialize_sum_cases",
"LowParse.Spec.Sum.parse_sum_kind",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Sum.parse_sum",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.l_True"
] | [] | false | false | false | false | false | let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: (x: sum_key t -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) =
| serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc) | false |
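serialize_sum above assembles the tagged-union serializer, and serialize_sum_eq in the file_context pins down its output as the serialized enum key Seq.append-ed with the serialized payload of that case. A minimal OCaml sketch of this tag-then-payload wire layout — the one-byte tags and the payload encodings below are invented for the example, not taken from any LowParse instance:

(* Toy analogue of serialize_sum / serialize_sum_eq: the output is the
   serialized tag immediately followed by the serialized payload. *)

type msg = MPing of int | MText of string

(* enum key -> repr: an invented one-byte tag per case *)
let tag_byte = function MPing _ -> '\x01' | MText _ -> '\x02'

(* per-case payload serializers (invented encodings for the sketch) *)
let serialize_payload = function
  | MPing n -> string_of_int n          (* decimal digits *)
  | MText s -> s                        (* raw body *)

(* serialize_sum: tag bytes ^ payload bytes, as serialize_sum_eq states *)
let serialize_msg (m : msg) : string =
  String.make 1 (tag_byte m) ^ serialize_payload m

let () =
  assert (serialize_msg (MPing 7) = "\x017");
  assert (serialize_msg (MText "ok") = "\x02ok")

The design point mirrored here is that the tag serializer and each payload serializer stay independent; serialize_sum only concatenates them, which is why serialize_sum_eq reduces to a single Seq.append equation.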
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.dsum_tag_of_data | val dsum_tag_of_data (t: dsum) : Tot (x: dsum_type t -> Tot (dsum_key t)) | val dsum_tag_of_data (t: dsum) : Tot (x: dsum_type t -> Tot (dsum_key t)) | let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 68,
"end_line": 522,
"start_col": 0,
"start_line": 521
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.Spec.Sum.dsum -> x: LowParse.Spec.Sum.dsum_type t -> LowParse.Spec.Sum.dsum_key t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_key"
] | [] | false | false | false | false | false | let dsum_tag_of_data (t: dsum) : Tot (x: dsum_type t -> Tot (dsum_key t)) =
| match t with | DSum _ _ _ _ tag_of_data _ _ _ _ _ _ -> tag_of_data | false |
LowParse.Spec.Tac.Combinators.fst | LowParse.Spec.Tac.Combinators.synth_pairs_to_struct_to_pairs_tac' | val synth_pairs_to_struct_to_pairs_tac' (n: nat) : Tac unit | val synth_pairs_to_struct_to_pairs_tac' (n: nat) : Tac unit | let synth_pairs_to_struct_to_pairs_tac' (n: nat) : Tac unit =
norm [delta]; // _only [(`%synth_inverse); (`%t8')]];
let x = forall_intro () in
destruct_lhs_pairs (binder_to_term x) n | {
"file_name": "src/lowparse/LowParse.Spec.Tac.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 41,
"end_line": 23,
"start_col": 0,
"start_line": 20
} | module LowParse.Spec.Tac.Combinators
include LowParse.Spec.Combinators
open LowParse.TacLib
(* for structs *)
let rec destruct_lhs_pairs (t: FStar.Tactics.term) (n: nat) : FStar.Tactics.Tac unit =
if n = 0
then trefl ()
else begin
destruct t;
let a = intro () in
let b = intro () in
let abeq = intro () in
rewrite abeq;
destruct_lhs_pairs (binder_to_term a) (n - 1)
end | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.TacLib.fst.checked",
"LowParse.Spec.Combinators.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Tac.Combinators.fst"
} | [
{
"abbrev": false,
"full_module": "LowParse.TacLib",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Tac",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Tac",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | n: Prims.nat -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.nat",
"LowParse.Spec.Tac.Combinators.destruct_lhs_pairs",
"Prims.unit",
"FStar.Stubs.Reflection.Types.term",
"FStar.Tactics.V1.Derived.binder_to_term",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Tactics.V1.Logic.forall_intro",
"FStar.Stubs.Tactics.V1.Builtins.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.delta",
"Prims.Nil"
] | [] | false | true | false | false | false | let synth_pairs_to_struct_to_pairs_tac' (n: nat) : Tac unit =
| norm [delta];
let x = forall_intro () in
destruct_lhs_pairs (binder_to_term x) n | false |
LowParse.Spec.Tac.Combinators.fst | LowParse.Spec.Tac.Combinators.synth_pairs_to_struct_to_pairs_tac | val synth_pairs_to_struct_to_pairs_tac
(#struct_t #pairs_t: Type)
(recip: (struct_t -> GTot pairs_t))
(n: nat)
: Tac unit | val synth_pairs_to_struct_to_pairs_tac
(#struct_t #pairs_t: Type)
(recip: (struct_t -> GTot pairs_t))
(n: nat)
: Tac unit | let synth_pairs_to_struct_to_pairs_tac (#struct_t: Type) (#pairs_t: Type) (recip: struct_t -> GTot pairs_t) (n: nat) : Tac unit =
apply (quote (synth_inverse_synth_injective' recip));
synth_pairs_to_struct_to_pairs_tac' n | {
"file_name": "src/lowparse/LowParse.Spec.Tac.Combinators.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 39,
"end_line": 27,
"start_col": 0,
"start_line": 25
} | module LowParse.Spec.Tac.Combinators
include LowParse.Spec.Combinators
open LowParse.TacLib
(* for structs *)
let rec destruct_lhs_pairs (t: FStar.Tactics.term) (n: nat) : FStar.Tactics.Tac unit =
if n = 0
then trefl ()
else begin
destruct t;
let a = intro () in
let b = intro () in
let abeq = intro () in
rewrite abeq;
destruct_lhs_pairs (binder_to_term a) (n - 1)
end
let synth_pairs_to_struct_to_pairs_tac' (n: nat) : Tac unit =
norm [delta]; // _only [(`%synth_inverse); (`%t8')]];
let x = forall_intro () in
destruct_lhs_pairs (binder_to_term x) n | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.TacLib.fst.checked",
"LowParse.Spec.Combinators.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Tac.Combinators.fst"
} | [
{
"abbrev": false,
"full_module": "LowParse.TacLib",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Tac",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Tac",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | recip: (_: struct_t -> Prims.GTot pairs_t) -> n: Prims.nat -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.nat",
"LowParse.Spec.Tac.Combinators.synth_pairs_to_struct_to_pairs_tac'",
"Prims.unit",
"FStar.Tactics.V1.Derived.apply",
"FStar.Stubs.Reflection.Types.term",
"LowParse.Spec.Combinators.synth_inverse_synth_injective'"
] | [] | false | true | false | false | false | let synth_pairs_to_struct_to_pairs_tac
(#struct_t #pairs_t: Type)
(recip: (struct_t -> GTot pairs_t))
(n: nat)
: Tac unit =
| apply (quote (synth_inverse_synth_injective' recip));
synth_pairs_to_struct_to_pairs_tac' n | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_sum_eq | val parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) ->
Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x))) | val parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) ->
Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x))) | let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 61,
"end_line": 231,
"start_col": 0,
"start_line": 209
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.sum_repr_type t) ->
pc:
(x: LowParse.Spec.Sum.sum_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.sum_type_of_tag t x))) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_sum t p pc) input ==
(match
LowParse.Spec.Base.parse (LowParse.Spec.Enum.parse_enum_key p
(LowParse.Spec.Sum.sum_enum t))
input
with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ k consumed_k) ->
let input_k = FStar.Seq.Base.slice input consumed_k (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (FStar.Pervasives.dsnd (pc k)) input_k with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some
(LowParse.Spec.Sum.synth_sum_case t k x,
consumed_k + consumed_x))
<:
FStar.Pervasives.Native.option (LowParse.Spec.Sum.sum_type t *
LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Sum.sum_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.sum_type_of_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_enum",
"LowParse.Spec.Enum.parse_enum_key",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Combinators.parse_synth_eq",
"Prims.__proj__Mkdtuple2__item___1",
"LowParse.Spec.Sum.sum_cases",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_sum_case",
"Prims.unit",
"LowParse.Spec.Sum.synth_sum_case_injective",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Sum.parse_sum_eq'",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Sum.parse_sum",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: (x: sum_key t -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input ==
(match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) ->
Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x))) =
| parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_sum' | val serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: (x: sum_key t -> Tot (parser k (sum_cases t x))))
(sc: (x: sum_key t -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | val serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: (x: sum_key t -> Tot (parser k (sum_cases t x))))
(sc: (x: sum_key t -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 314,
"start_col": 0,
"start_line": 294
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.sum ->
s: LowParse.Spec.Base.serializer p ->
sc: (x: LowParse.Spec.Sum.sum_key t -> LowParse.Spec.Base.serializer (pc x))
-> Prims.Pure (LowParse.Spec.Base.serializer (LowParse.Spec.Sum.parse_sum' t p pc)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.sum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.sum_repr_type",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Sum.sum_key",
"LowParse.Spec.Sum.sum_cases",
"LowParse.Spec.Combinators.serialize_tagged_union",
"LowParse.Spec.Combinators.parse_filter_kind",
"LowParse.Spec.Enum.parse_enum_key",
"LowParse.Spec.Sum.sum_key_type",
"LowParse.Spec.Sum.sum_enum",
"LowParse.Spec.Enum.serialize_enum_key",
"LowParse.Spec.Sum.sum_type",
"LowParse.Spec.Sum.sum_tag_of_data",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Sum.parse_sum'",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.l_True"
] | [] | false | false | false | false | false | let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: (x: sum_key t -> Tot (parser k (sum_cases t x))))
(sc: (x: sum_key t -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) =
| serialize_tagged_union #(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc | false |
Vale.AES.GCM.fsti | Vale.AES.GCM.lower3_equal | val lower3_equal (q0 q1: quad32) : bool | val lower3_equal (q0 q1: quad32) : bool | let lower3_equal (q0 q1:quad32) : bool =
q0.lo0 = q1.lo0 &&
q0.lo1 = q1.lo1 &&
q0.hi2 = q1.hi2 | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCM.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 17,
"end_line": 30,
"start_col": 0,
"start_line": 27
} | module Vale.AES.GCM
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.AES.GCM_s
open Vale.AES.AES_s
open Vale.AES.GCM_helpers
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GHash_s
open FStar.Mul
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Calc
open Vale.Def.Words.Four_s
let set_to_one_LE (q:quad32) : quad32 = four_insert q 1 0 // Mkfour 1 q.lo1 q.hi2 q.hi3
let upper3_equal (q0 q1:quad32) : bool =
q0.lo1 = q1.lo1 &&
q0.hi2 = q1.hi2 &&
q0.hi3 = q1.hi3 | {
"checked_file": "/",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Calc",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | q0: Vale.Def.Types_s.quad32 -> q1: Vale.Def.Types_s.quad32 -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Vale.Def.Types_s.quad32",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"Vale.Def.Types_s.nat32",
"Vale.Def.Words_s.__proj__Mkfour__item__lo0",
"Vale.Def.Words_s.__proj__Mkfour__item__lo1",
"Vale.Def.Words_s.__proj__Mkfour__item__hi2",
"Prims.bool"
] | [] | false | false | false | true | false | let lower3_equal (q0 q1: quad32) : bool =
| q0.lo0 = q1.lo0 && q0.lo1 = q1.lo1 && q0.hi2 = q1.hi2 | false |
Hacl.Impl.Ed25519.Group.fst | Hacl.Impl.Ed25519.Group.point_add | val point_add : BE.lmul_st U64 20ul 0ul mk_to_ed25519_comm_monoid | val point_add : BE.lmul_st U64 20ul 0ul mk_to_ed25519_comm_monoid | let point_add ctx x y xy =
let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_add_lemma
(F51.refl_ext_point (as_seq h0 x)) (F51.refl_ext_point (as_seq h0 y));
Hacl.Impl.Ed25519.PointAdd.point_add xy x y | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Group.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 45,
"end_line": 46,
"start_col": 0,
"start_line": 42
} | module Hacl.Impl.Ed25519.Group
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
open Hacl.Impl.Ed25519.PointConstants
module LSeq = Lib.Sequence
module F51 = Hacl.Impl.Ed25519.Field51
module BE = Hacl.Impl.Exponentiation.Definitions
module S = Spec.Ed25519
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
unfold
let a_spec = S.aff_point_c
unfold
let refl (a:LSeq.lseq uint64 20{F51.linv a}) : GTot a_spec =
S.to_aff_point (F51.refl_ext_point a)
unfold
let linv_ctx (a:LSeq.lseq uint64 0) : Type0 = True
inline_for_extraction noextract
let mk_to_ed25519_comm_monoid : BE.to_comm_monoid U64 20ul 0ul = {
BE.a_spec = a_spec;
BE.comm_monoid = S.mk_ed25519_comm_monoid;
BE.linv_ctx = linv_ctx;
BE.linv = F51.linv;
BE.refl = refl;
}
inline_for_extraction noextract | {
"checked_file": "/",
"dependencies": [
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.Definitions.fst.checked",
"Hacl.Impl.Ed25519.PointDouble.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.PointAdd.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Ed25519.Group.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Hacl.Impl.Exponentiation.Definitions.lmul_st Lib.IntTypes.U64
20ul
0ul
Hacl.Impl.Ed25519.Group.mk_to_ed25519_comm_monoid | Prims.Tot | [
"total"
] | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Ed25519.PointAdd.point_add",
"Prims.unit",
"Spec.Ed25519.Lemmas.to_aff_point_add_lemma",
"Hacl.Impl.Ed25519.Field51.refl_ext_point",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | false | false | false | false | false | let point_add ctx x y xy =
| let h0 = ST.get () in
Spec.Ed25519.Lemmas.to_aff_point_add_lemma (F51.refl_ext_point (as_seq h0 x))
(F51.refl_ext_point (as_seq h0 y));
Hacl.Impl.Ed25519.PointAdd.point_add xy x y | false |
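A short, informal reading of the definition above (not taken from the record): the call to to_aff_point_add_lemma rewrites the extended-coordinates addition through S.to_aff_point, which is what lets this thin wrapper refine the multiplication of S.mk_ed25519_comm_monoid.

// Roughly, for the output buffer xy one obtains
//   refl (as_seq h1 xy) == S.aff_point_add (refl (as_seq h0 x)) (refl (as_seq h0 y))
// with refl as defined earlier in this file, which is the shape of postcondition
// that BE.lmul_st expects for mk_to_ed25519_comm_monoid; ctx is unused because
// linv_ctx is trivially True.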
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case | val synth_dsum_case (s: dsum)
: Tot (x: dsum_key s -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x)) | val synth_dsum_case (s: dsum)
: Tot (x: dsum_key s -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x)) | let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 64,
"end_line": 566,
"start_col": 0,
"start_line": 563
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k' | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
x: LowParse.Spec.Sum.dsum_key s ->
_: LowParse.Spec.Sum.dsum_type_of_tag s x
-> LowParse.Spec.Base.refine_with_tag (LowParse.Spec.Sum.dsum_tag_of_data s) x | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Sum.dsum_type_of_tag",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data"
] | [] | false | false | false | false | false | let synth_dsum_case (s: dsum)
: Tot (x: dsum_key s -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x)) =
| match s with | DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case | false |
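For orientation (this restates facts already visible in the DSum definition carried in the record above rather than adding new ones): synth_dsum_case simply projects the synth_case field, and the constructor packages the round-trip property that the next record's lemma needs.

// For any s: dsum, x: dsum_key s and y: dsum_type_of_tag s x, the constructor field
// synth_case_recip_synth_case yields a squash of
//   synth_dsum_case_recip s x (synth_dsum_case s x y) == y
// i.e. synth_dsum_case s x is injective, as proved explicitly in the following record.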
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case_injective | val synth_dsum_case_injective (s: dsum) (x: dsum_key s)
: Lemma (synth_injective (synth_dsum_case s x)) | val synth_dsum_case_injective (s: dsum) (x: dsum_key s)
: Lemma (synth_injective (synth_dsum_case s x)) | let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 602,
"start_col": 0,
"start_line": 574
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: LowParse.Spec.Sum.dsum -> x: LowParse.Spec.Sum.dsum_key s
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.Combinators.synth_injective (LowParse.Spec.Sum.synth_dsum_case s x)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_key",
"FStar.Classical.forall_intro_2",
"LowParse.Spec.Sum.dsum_type_of_tag",
"Prims.l_imp",
"Prims.eq2",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"LowParse.Spec.Sum.synth_dsum_case",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Classical.move_requires",
"Prims._assert",
"LowParse.Spec.Sum.synth_dsum_case_recip",
"LowParse.Spec.Sum.__proj__DSum__item__synth_case_recip_synth_case",
"LowParse.Spec.Combinators.synth_injective"
] | [] | false | false | true | false | false | let synth_dsum_case_injective (s: dsum) (x: dsum_key s)
: Lemma (synth_injective (synth_dsum_case s x)) =
| let f (y1 y2: dsum_type_of_tag s x)
: Lemma (requires (synth_dsum_case s x y1 == synth_dsum_case s x y2)) (ensures (y1 == y2)) =
let k1:squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2:squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g (y1 y2: dsum_type_of_tag s x)
: Lemma (synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2) =
Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g | false |
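As a hedged usage sketch (assumed, by analogy with parse_sum_cases earlier in this file; parse_dsum_cases_sketch is a name introduced here purely for illustration): the injectivity lemma just proved is what discharges the precondition of parse_synth when assembling a per-tag parser on top of parse_dsum_type_of_tag.

let parse_dsum_cases_sketch
  (s: dsum)
  (f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
  (#k: parser_kind)
  (g: parser k (dsum_type_of_unknown_tag s))
  (x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (refine_with_tag (dsum_tag_of_data s) x))
= synth_dsum_case_injective s x;
  parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x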
Vale.AES.GCM.fsti | Vale.AES.GCM.upper3_equal | val upper3_equal (q0 q1: quad32) : bool | val upper3_equal (q0 q1: quad32) : bool | let upper3_equal (q0 q1:quad32) : bool =
q0.lo1 = q1.lo1 &&
q0.hi2 = q1.hi2 &&
q0.hi3 = q1.hi3 | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCM.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 17,
"end_line": 25,
"start_col": 0,
"start_line": 22
} | module Vale.AES.GCM
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.AES.GCM_s
open Vale.AES.AES_s
open Vale.AES.GCM_helpers
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GHash_s
open FStar.Mul
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Calc
open Vale.Def.Words.Four_s
let set_to_one_LE (q:quad32) : quad32 = four_insert q 1 0 // Mkfour 1 q.lo1 q.hi2 q.hi3 | {
"checked_file": "/",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Calc",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | q0: Vale.Def.Types_s.quad32 -> q1: Vale.Def.Types_s.quad32 -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Vale.Def.Types_s.quad32",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"Vale.Def.Types_s.nat32",
"Vale.Def.Words_s.__proj__Mkfour__item__lo1",
"Vale.Def.Words_s.__proj__Mkfour__item__hi2",
"Vale.Def.Words_s.__proj__Mkfour__item__hi3",
"Prims.bool"
] | [] | false | false | false | true | false | let upper3_equal (q0 q1: quad32) : bool =
| q0.lo1 = q1.lo1 && q0.hi2 = q1.hi2 && q0.hi3 = q1.hi3 | false |
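By the same token as lower3_equal earlier in this section (a minimal sketch, assuming upper3_equal and Mkfour from Vale.Def.Words_s are in scope): upper3_equal ignores lo0 instead of hi3, which fits its use next to set_to_one_LE, the helper in this file that overwrites lo0 with 1.

let _upper3_equal_example : unit =
  // lo0 differs, but lo1, hi2 and hi3 agree
  assert_norm (upper3_equal (Mkfour 0 2 3 4) (Mkfour 9 2 3 4) = true)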
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_type_of_tag | val parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) | val parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) | let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 135,
"end_line": 613,
"start_col": 0,
"start_line": 604
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
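(* [parse_sum t p pc] is a tagged union: it first parses the enum key with
   [parse_enum_key p (sum_enum t)], then dispatches on that key to the
   per-case parser [dsnd (pc k)], and rebuilds the data with
   [synth_sum_case]. The unfolding lemmas below make this explicit. *)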
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
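(* The three unfolding lemmas above expose [parse_sum] at decreasing levels of
   abstraction: [parse_sum_eq'] keeps the per-case parser packaged as
   [parse_sum_cases'], [parse_sum_eq] unfolds it down to [dsnd (pc k)] and
   [synth_sum_case], and [parse_sum_eq''] additionally unfolds the enum-key
   parser down to the raw representation parser [p]. *)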
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
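(* Illustrative sketch only (hypothetical names, not part of this module): a
   client typically instantiates [make_sum'] with an enum, a datatype and the
   two synthesis functions, along the lines of

     type color = | Red : U8.t -> color | Blue : U16.t -> color
     let color_enum : enum string U8.t = [ ("red", 0uy); ("blue", 1uy) ]
     let tag_of_color (c: color) : enum_key color_enum =
       match c with Red _ -> "red" | Blue _ -> "blue"

   followed by the per-tag payload types and the [synth_case] /
   [synth_case_recip] pair; the two roundtrip obligations of [make_sum'] are
   squashes that are usually discharged by normalization. *)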
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
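(* A [dsum] is a sum with a default case: a known enum key [k] carries a
   payload of [type_of_known_tag k], while any representation value outside
   the enum is kept as an [unknown_enum_repr] together with a payload of
   [type_of_unknown_tag]. This is the usual shape of extensible wire formats,
   where unrecognized tags must still be parsed. *)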
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
x: LowParse.Spec.Sum.dsum_key s
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.weaken_parse_dsum_cases_kind s f k)
(LowParse.Spec.Sum.dsum_type_of_tag s x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Base.coerce",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind",
"LowParse.Spec.Sum.dsum_type_of_tag",
"LowParse.Spec.Base.weaken",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Enum.unknown_enum_repr"
] | [] | false | false | false | false | false | let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) =
| match x with
| Known x' ->
coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
(weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' ->
weaken (weaken_parse_dsum_cases_kind s f k) g
<:
parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.weaken_parse_dsum_cases_kind | val weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(k': parser_kind)
: Tot parser_kind | val weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(k': parser_kind)
: Tot parser_kind | let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k' | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 45,
"end_line": 552,
"start_col": 0,
"start_line": 542
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
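(* [serialize_sum_cases] transports [serialize_sum_cases'] along the parser
   equality established by [parse_sum_cases_eq'], so that it can serve as a
   serializer for the weakened parser [parse_sum_cases]. *)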
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
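(* [serialize_sum_eq] states that the serialized bytes of a sum value are the
   enum key of its tag followed by the serialization of its case payload,
   mirroring the parsing order of [parse_sum]. *)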
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
k': LowParse.Spec.Base.parser_kind
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Base.glb",
"LowParse.Spec.Base.glb_list_of",
"LowParse.Spec.Sum.dsum_key_type",
"FStar.List.Tot.Base.mem",
"Prims.bool",
"FStar.List.Tot.Base.map",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Sum.dsum_repr_type",
"FStar.Pervasives.Native.fst",
"LowParse.Spec.Sum.dsum_enum",
"Prims.list"
] | [] | false | false | false | false | false | let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(k': parser_kind)
: Tot parser_kind =
| let keys:list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
(glb_list_of #(dsum_key_type s)
(fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then
let (| k , _ |) = f x in
k
else k')
(List.Tot.map fst (dsum_enum s)))
`glb`
k' | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_type_of_tag' | val parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) | val parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) | let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 86,
"end_line": 645,
"start_col": 0,
"start_line": 636
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
x: LowParse.Spec.Sum.dsum_key s
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.parse_dsum_cases_kind s f g x)
(LowParse.Spec.Sum.dsum_type_of_tag s x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Base.coerce",
"LowParse.Spec.Sum.parse_dsum_cases_kind",
"LowParse.Spec.Sum.dsum_type_of_tag",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Enum.unknown_enum_repr"
] | [] | false | false | false | false | false | let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) =
| match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x) | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_cases' | val parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)) | val parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)) | let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 124,
"end_line": 657,
"start_col": 0,
"start_line": 647
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
x: LowParse.Spec.Sum.dsum_key s
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.parse_dsum_cases_kind s f g x)
(LowParse.Spec.Sum.dsum_cases s x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Combinators.parse_synth",
"Prims.__proj__Mkdtuple2__item___1",
"LowParse.Spec.Sum.dsum_cases",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_dsum_case",
"LowParse.Spec.Enum.Known",
"LowParse.Spec.Sum.parse_dsum_cases_kind",
"LowParse.Spec.Enum.unknown_enum_repr",
"LowParse.Spec.Enum.Unknown",
"Prims.unit",
"LowParse.Spec.Sum.synth_dsum_case_injective"
] | [] | false | false | false | false | false | let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)) =
| synth_dsum_case_injective s x;
match x with
| Known x' ->
((dsnd (f x')) `parse_synth` (synth_dsum_case s (Known x')))
<:
parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' ->
g `parse_synth` (synth_dsum_case s (Unknown x'))
<:
parser (parse_dsum_cases_kind s f g x) (dsum_cases s x) | false |
Vale.AES.GCM.fsti | Vale.AES.GCM.gcm_decrypt_LE_tag | val gcm_decrypt_LE_tag
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_LE)
(cipher auth: seq nat8)
: Pure (seq nat8)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun t -> True) | val gcm_decrypt_LE_tag
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_LE)
(cipher auth: seq nat8)
: Pure (seq nat8)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun t -> True) | let gcm_decrypt_LE_tag (alg:algorithm) (key:seq nat8) (iv:supported_iv_LE) (cipher:seq nat8) (auth:seq nat8) :
Pure (seq nat8)
(requires
is_aes_key alg key /\
length cipher < pow2_32 /\
length auth < pow2_32
)
(ensures fun t -> True)
=
let key_LE = seq_nat8_to_seq_nat32_LE key in
let h_LE = aes_encrypt_LE alg key_LE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_LE iv in
let lengths_BE = insert_nat64 (insert_nat64 (Mkfour 0 0 0 0) (8 * length auth) 1) (8 * length cipher) 0 in
let lengths_LE = reverse_bytes_quad32 lengths_BE in
let zero_padded_c_LE = le_bytes_to_seq_quad32 (pad_to_128_bits cipher) in
let zero_padded_a_LE = le_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_LE = append zero_padded_a_LE (append zero_padded_c_LE (create 1 lengths_LE)) in
let s_LE = ghash_LE h_LE hash_input_LE in
let t = gctr_encrypt_LE j0_BE (le_quad32_to_bytes s_LE) alg key_LE in
t | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCM.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 228,
"start_col": 0,
"start_line": 206
} | module Vale.AES.GCM
open Vale.Def.Opaque_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.AES.GCM_s
open Vale.AES.AES_s
open Vale.AES.GCM_helpers
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GHash_s
open FStar.Mul
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open FStar.Calc
open Vale.Def.Words.Four_s
let set_to_one_LE (q:quad32) : quad32 = four_insert q 1 0 // Mkfour 1 q.lo1 q.hi2 q.hi3
let upper3_equal (q0 q1:quad32) : bool =
q0.lo1 = q1.lo1 &&
q0.hi2 = q1.hi2 &&
q0.hi3 = q1.hi3
let lower3_equal (q0 q1:quad32) : bool =
q0.lo0 = q1.lo0 &&
q0.lo1 = q1.lo1 &&
q0.hi2 = q1.hi2
val lemma_compute_iv_easy (iv_b iv_extra_b:seq quad32) (iv:supported_iv_LE) (num_bytes:nat64) (h_LE j0:quad32) : Lemma
(requires
length iv_extra_b == 1 /\
length iv_b * (128/8) <= num_bytes /\ num_bytes < length iv_b * (128/8) + 128/8 /\
num_bytes == 96/8 /\
(let iv_BE = reverse_bytes_quad32 (index iv_extra_b 0) in
j0 == Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3) /\
(let raw_quads = append iv_b iv_extra_b in
let iv_bytes = slice (le_seq_quad32_to_bytes raw_quads) 0 num_bytes in
iv_bytes == iv))
(ensures j0 == compute_iv_BE h_LE iv)
open Vale.AES.GHash
val lemma_compute_iv_hard (iv:supported_iv_LE) (quads:seq quad32) (length_quad h_LE j0:quad32) : Lemma
(requires
~(length iv == 96/8) /\
quads == le_bytes_to_seq_quad32 (pad_to_128_bits iv) /\
j0 == ghash_incremental h_LE (Mkfour 0 0 0 0) (append quads (create 1 length_quad)) /\
length_quad == reverse_bytes_quad32 (insert_nat64
(insert_nat64
(Mkfour 0 0 0 0) 0 1)
(8 * (length iv)) 0))
(ensures reverse_bytes_quad32 j0 == compute_iv_BE h_LE iv)
val lemma_length_simplifier (s bytes t:seq quad32) (num_bytes:nat) : Lemma
(requires t == (if num_bytes > (length s) * 16 then append s bytes else s) /\
(num_bytes <= (length s) * 16 ==> num_bytes == (length s * 16)) /\
length s * 16 <= num_bytes /\
num_bytes < length s * 16 + 16 /\
length bytes == 1
)
(ensures slice (le_seq_quad32_to_bytes t) 0 num_bytes ==
slice (le_seq_quad32_to_bytes (append s bytes)) 0 num_bytes)
val gcm_blocks_helper_simplified (alg:algorithm) (key:seq nat32)
(a128 a_bytes p128x6 p128 p_bytes c128x6 c128 c_bytes:seq quad32)
(p_num_bytes a_num_bytes:nat)
(iv:supported_iv_LE) (j0_BE h enc_hash length_quad:quad32) : Lemma
(requires // Required by gcm_blocks
length p128x6 * 16 + length p128 * 16 <= p_num_bytes /\
p_num_bytes < length p128x6 * 16 + length p128 * 16 + 16 /\
length a128 * 16 <= a_num_bytes /\
a_num_bytes < length a128 * 16 + 16 /\
a_num_bytes < pow2_32 /\
length p128x6 == length c128x6 /\
length p128 == length c128 /\
length p_bytes == 1 /\
length c_bytes == 1 /\
length a_bytes == 1 /\
is_aes_key_LE alg key /\
j0_BE == compute_iv_BE h iv /\
h = aes_encrypt_LE alg key (Mkfour 0 0 0 0) /\
// Ensured by gcm_blocks
p_num_bytes < pow2_32 /\ a_num_bytes < pow2_32 /\
length_quad == reverse_bytes_quad32
(insert_nat64 (insert_nat64 (Mkfour 0 0 0 0) (8 * a_num_bytes) 1) (8 * p_num_bytes) 0) /\
(let ctr_BE_1:quad32 = j0_BE in
let ctr_BE_2:quad32 = inc32 j0_BE 1 in
let plain:seq quad32 =
if p_num_bytes > (length p128x6 + length p128) * 16 then
append (append p128x6 p128) p_bytes
else
append p128x6 p128
in
let cipher:seq quad32 =
if p_num_bytes > (length p128x6 + length p128) * 16 then
append (append c128x6 c128) c_bytes
else
append c128x6 c128
in
let cipher_bound:nat = length p128x6 + length p128 +
(if p_num_bytes > (length p128x6 + length p128) * 16 then 1 else 0)
in
gctr_partial alg cipher_bound plain cipher key ctr_BE_2 /\
(let auth_raw_quads =
if a_num_bytes > (length a128) * 16 then append a128 a_bytes else a128
in
let auth_input_bytes = slice (le_seq_quad32_to_bytes auth_raw_quads) 0 a_num_bytes in
let auth_padded_bytes = pad_to_128_bits auth_input_bytes in
let auth_quads = le_bytes_to_seq_quad32 auth_padded_bytes in
let raw_quads = append (append auth_quads c128x6) c128 in
let total_bytes = (length auth_quads) * 16 + p_num_bytes in
let raw_quads =
if p_num_bytes > (length p128x6 + length p128) * 16 then
let raw_quads = append raw_quads c_bytes in
let input_bytes = slice (le_seq_quad32_to_bytes raw_quads) 0 total_bytes in
let input_padded_bytes = pad_to_128_bits input_bytes in
le_bytes_to_seq_quad32 input_padded_bytes
else
raw_quads
in
let final_quads = append raw_quads (create 1 length_quad) in
enc_hash == gctr_encrypt_block ctr_BE_1 (ghash_LE h final_quads) alg key 0
)))
(ensures (let auth_raw_quads = append a128 a_bytes in
let auth_bytes = slice (le_seq_quad32_to_bytes auth_raw_quads) 0 a_num_bytes in
let plain_raw_quads = append (append p128x6 p128) p_bytes in
let plain_bytes = slice (le_seq_quad32_to_bytes plain_raw_quads) 0 p_num_bytes in
let cipher_raw_quads = append (append c128x6 c128) c_bytes in
let cipher_bytes = slice (le_seq_quad32_to_bytes cipher_raw_quads) 0 p_num_bytes in
length auth_bytes < pow2_32 /\
length plain_bytes < pow2_32 /\
cipher_bytes == fst (gcm_encrypt_LE alg (seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes) /\
le_quad32_to_bytes enc_hash ==
snd (gcm_encrypt_LE alg (seq_nat32_to_seq_nat8_LE key)
iv plain_bytes auth_bytes))
)
val lemma_gcm_encrypt_decrypt_equiv (alg:algorithm) (key:seq nat32) (iv:supported_iv_LE) (j0_BE:quad32) (plain cipher auth alleged_tag:seq nat8) : Lemma
(requires
is_aes_key_LE alg key /\
(let h_LE = aes_encrypt_LE alg key (Mkfour 0 0 0 0) in
j0_BE = compute_iv_BE h_LE iv) /\
length cipher < pow2_32 /\
length auth < pow2_32 /\
plain == fst (gcm_encrypt_LE alg (seq_nat32_to_seq_nat8_LE key) iv cipher auth)
)
(ensures plain == fst (gcm_decrypt_LE alg (seq_nat32_to_seq_nat8_LE key) iv cipher auth alleged_tag))
val gcm_blocks_helper_dec_simplified (alg:algorithm) (key:seq nat32)
(p128x6 p128 p_bytes c128x6 c128 c_bytes:seq quad32)
(auth_bytes alleged_tag:seq nat8)
(p_num_bytes:nat)
(iv:supported_iv_LE) (j0_BE:quad32) : Lemma
(requires // Required by gcm_blocks
length p128x6 * 16 + length p128 * 16 <= p_num_bytes /\
p_num_bytes < length p128x6 * 16 + length p128 * 16 + 16 /\
length p128x6 == length c128x6 /\
length p128 == length c128 /\
length p_bytes == 1 /\
length c_bytes == 1 /\
(length auth_bytes) < pow2_32 /\
is_aes_key_LE alg key /\
(let h_LE = aes_encrypt_LE alg key (Mkfour 0 0 0 0) in
j0_BE = compute_iv_BE h_LE iv) /\
// Ensured by gcm_blocks
p_num_bytes < pow2_32 /\
(let ctr_BE_2:quad32 = inc32 j0_BE 1 in
let plain:seq quad32 =
if p_num_bytes > (length p128x6 + length p128) * 16 then
append (append p128x6 p128) p_bytes
else
append p128x6 p128
in
let cipher:seq quad32 =
if p_num_bytes > (length p128x6 + length p128) * 16 then
append (append c128x6 c128) c_bytes
else
append c128x6 c128
in
let cipher_bound:nat = length p128x6 + length p128 +
(if p_num_bytes > (length p128x6 + length p128) * 16 then 1 else 0)
in
gctr_partial alg cipher_bound plain cipher key ctr_BE_2
))
(ensures (let plain_raw_quads = append (append p128x6 p128) p_bytes in
let plain_bytes = slice (le_seq_quad32_to_bytes plain_raw_quads) 0 p_num_bytes in
let cipher_raw_quads = append (append c128x6 c128) c_bytes in
let cipher_bytes = slice (le_seq_quad32_to_bytes cipher_raw_quads) 0 p_num_bytes in
length auth_bytes < pow2_32 /\
length plain_bytes < pow2_32 /\
cipher_bytes == fst (gcm_decrypt_LE alg (seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes alleged_tag))) | {
"checked_file": "/",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Calc",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Words_s.nat8 ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
cipher: FStar.Seq.Base.seq Vale.Def.Words_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Words_s.nat8
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Words_s.nat8) | Prims.Pure | [] | [] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Words_s.nat8",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.Def.Types_s.le_quad32_to_bytes",
"Vale.Def.Types_s.quad32",
"Vale.AES.GHash_s.ghash_LE",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create",
"Vale.Def.Types_s.le_bytes_to_seq_quad32",
"Vale.AES.GCTR_s.pad_to_128_bits",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.Def.Types_s.insert_nat64",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"FStar.Mul.op_Star",
"FStar.Seq.Base.length",
"Vale.AES.GCM_s.compute_iv_BE",
"Vale.AES.AES_s.aes_encrypt_LE",
"Vale.Def.Words_s.nat32",
"Vale.Def.Words.Seq_s.seq_nat8_to_seq_nat32_LE",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True"
] | [] | false | false | false | false | false | let gcm_decrypt_LE_tag
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_LE)
(cipher auth: seq nat8)
: Pure (seq nat8)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun t -> True) =
| let key_LE = seq_nat8_to_seq_nat32_LE key in
let h_LE = aes_encrypt_LE alg key_LE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_LE iv in
let lengths_BE =
insert_nat64 (insert_nat64 (Mkfour 0 0 0 0) (8 * length auth) 1) (8 * length cipher) 0
in
let lengths_LE = reverse_bytes_quad32 lengths_BE in
let zero_padded_c_LE = le_bytes_to_seq_quad32 (pad_to_128_bits cipher) in
let zero_padded_a_LE = le_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_LE = append zero_padded_a_LE (append zero_padded_c_LE (create 1 lengths_LE)) in
let s_LE = ghash_LE h_LE hash_input_LE in
let t = gctr_encrypt_LE j0_BE (le_quad32_to_bytes s_LE) alg key_LE in
t | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_dsum' | val serialize_dsum'
(#kt: parser_kind)
(t: dsum)
(#p: parser kt (dsum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: (x: dsum_key t -> Tot (parser k (dsum_cases t x))))
(sc: (x: dsum_key t -> Tot (serializer (pc x))))
: Pure (serializer (parse_dsum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | val serialize_dsum'
(#kt: parser_kind)
(t: dsum)
(#p: parser kt (dsum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: (x: dsum_key t -> Tot (parser k (dsum_cases t x))))
(sc: (x: dsum_key t -> Tot (serializer (pc x))))
: Pure (serializer (parse_dsum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | let serialize_dsum'
(#kt: parser_kind)
(t: dsum)
(#p: parser kt (dsum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
(sc: ((x: dsum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_dsum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(kt)
#(dsum_key t)
#(parse_maybe_enum_key p (dsum_enum t))
(serialize_maybe_enum_key p s (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
#pc
sc | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 906,
"start_col": 0,
"start_line": 886
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
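(* make_sum and make_sum' are the user-facing constructors: they take the enum,
   the tag and payload mappings, and the two round-trip proofs (plain lemmas for
   make_sum, squashes for make_sum'), and package them into a sum descriptor. *)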
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
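(* A dsum is a sum with a default case: known enum keys carry their own payload
   type and parser, while representations outside the enum are all handled through
   type_of_unknown_tag, so parsing need not fail on unknown tags. *)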
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
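(* The dsum descriptor mirrors sum, except that tags are maybe_enum_key values;
   synth_case and synth_case_recip must be mutual inverses, witnessed by the two
   squash-valued fields. *)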
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
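(* The case parsers may all have different kinds; weaken_parse_dsum_cases_kind
   takes the greatest lower bound of the known-case kinds and the default kind k',
   so that every case parser can be weakened to one common kind. *)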
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
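(* parse_dsum_type_of_tag dispatches on the tag: a Known tag uses its tag-specific
   parser, an Unknown tag falls back to the default parser g; both sides are
   weakened to the common kind computed above. *)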
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
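(* parse_dsum' is a tagged union: parse the representation as a maybe_enum_key,
   then run the case parser selected by the resulting tag. *)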
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k)
let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)
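(* The *_eq lemmas below unfold parse_dsum on a concrete input: parse the tag,
   slice off the consumed bytes, then run the selected case parser; they are proved
   by unfolding the tagged-union and parse_synth combinators on that input. *)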
let parse_dsum_eq''
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq #(kt) #(dsum_key t) (parse_maybe_enum_key p (dsum_enum t)) #(dsum_type t) (dsum_tag_of_data t) (parse_dsum_cases t f g) input;
parse_synth_eq p (maybe_enum_key_of_repr (dsum_enum t)) input
let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t)) (dsum_tag_of_data t) (parse_dsum_cases t f g) (parse_maybe_enum_key p (dsum_enum t)) (fun input -> ()) (parse_dsum_cases_kind t f g) (parse_dsum_cases' t f g) (fun tg input -> parse_dsum_cases_eq' t f g tg input) input
let parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq_ t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let parse_dsum_eq
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match k with
| Known k' ->
begin match parse (dsnd (f k')) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
| Unknown k' ->
begin match parse g input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
end
))
= parse_dsum_eq_ t p f g input;
let j = parse (parse_maybe_enum_key p (dsum_enum t)) input in
match j with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_dsum_case_injective t k;
begin match k with
| Known k_ ->
parse_synth_eq (dsnd (f k_)) (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') (dsnd (f k_))) (synth_dsum_case t k) input_k
| Unknown k_ ->
parse_synth_eq g (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') g) (synth_dsum_case t k) input_k
end
let parse_dsum_eq3
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (r, consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) r in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_type_of_tag' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let synth_dsum_case_inverse
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x))
= let f
(y: refine_with_tag (dsum_tag_of_data s) (x))
: Lemma
(synth_dsum_case s x (synth_dsum_case_recip s x y) == y)
= DSum?.synth_case_synth_case_recip s y
in
Classical.forall_intro f
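(* Serialization mirrors the parser structure: serialize_dsum_type_of_tag reuses
   the per-tag serializers (or sg for unknown tags), and serialize_dsum_cases
   composes them with synth_dsum_case through serialize_synth. *)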
let serialize_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_type_of_tag s f g x))
= match x with
| Known x' ->
serialize_ext (dsnd (f x')) (sr x') (parse_dsum_type_of_tag s f g x)
| Unknown x' ->
serialize_ext g sg (parse_dsum_type_of_tag s f g x)
let serialize_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_cases s f g x))
= synth_dsum_case_injective s x;
synth_dsum_case_inverse s x;
serialize_synth
_
(synth_dsum_case s x)
(serialize_dsum_type_of_tag s f sr g sg x)
(synth_dsum_case_recip s x)
() | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.dsum ->
s: LowParse.Spec.Base.serializer p ->
sc: (x: LowParse.Spec.Sum.dsum_key t -> LowParse.Spec.Base.serializer (pc x))
-> Prims.Pure (LowParse.Spec.Base.serializer (LowParse.Spec.Sum.parse_dsum' t p pc)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Sum.dsum_cases",
"LowParse.Spec.Combinators.serialize_tagged_union",
"LowParse.Spec.Enum.parse_maybe_enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Enum.serialize_maybe_enum_key",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Sum.parse_dsum'",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.l_True"
] | [] | false | false | false | false | false | let serialize_dsum'
(#kt: parser_kind)
(t: dsum)
(#p: parser kt (dsum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: (x: dsum_key t -> Tot (parser k (dsum_cases t x))))
(sc: (x: dsum_key t -> Tot (serializer (pc x))))
: Pure (serializer (parse_dsum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) =
| serialize_tagged_union #(kt)
#(dsum_key t)
#(parse_maybe_enum_key p (dsum_enum t))
(serialize_maybe_enum_key p s (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
#pc
sc | false |
CSL.Semantics.fst | CSL.Semantics.commutative | val commutative : equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 24,
"end_line": 62,
"start_col": 0,
"start_line": 60
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.logical"
] | [] | false | false | false | true | true | let commutative #a (equals: (a -> a -> prop)) (f: (a -> a -> a)) =
| forall x y. {:pattern f x y} (f x y) `equals` (f y x) | false |
|
CSL.Semantics.fst | CSL.Semantics.associative | val associative : equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 36,
"end_line": 58,
"start_col": 0,
"start_line": 56
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quanitifers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.logical"
] | [] | false | false | false | true | true | let associative #a (equals: (a -> a -> prop)) (f: (a -> a -> a)) =
| forall x y z. (f x (f y z)) `equals` (f (f x y) z) | false |
|
CSL.Semantics.fst | CSL.Semantics.symmetry | val symmetry : equals: (_: a -> _: a -> Prims.prop) -> Prims.logical | let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 33,
"end_line": 51,
"start_col": 0,
"start_line": 49
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | equals: (_: a -> _: a -> Prims.prop) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.logical"
] | [] | false | false | false | true | true | let symmetry #a (equals: (a -> a -> prop)) =
| forall x y. {:pattern (x `equals` y)} x `equals` y ==> y `equals` x | false |
|
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_cases | val parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x)) | val parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x)) | let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 66,
"end_line": 623,
"start_col": 0,
"start_line": 615
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
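(* A sum descriptor packages an enum of tags, a payload type for each tag, and
   synthesis functions in both directions; the last two fields require those
   functions to be mutual inverses, which the parser and serializer proofs rely on. *)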
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
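(* parse_sum parses the key with parse_enum_key, failing on representations outside
   the enum, and then runs the case parser for that key; the lemmas below unfold
   this composition on a given input. *)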
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
x: LowParse.Spec.Sum.dsum_key s
-> LowParse.Spec.Base.parser (LowParse.Spec.Sum.weaken_parse_dsum_cases_kind s f k)
(LowParse.Spec.Sum.dsum_cases s x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind",
"LowParse.Spec.Sum.dsum_type_of_tag",
"LowParse.Spec.Sum.dsum_cases",
"LowParse.Spec.Sum.parse_dsum_type_of_tag",
"LowParse.Spec.Sum.synth_dsum_case",
"Prims.unit",
"LowParse.Spec.Sum.synth_dsum_case_injective"
] | [] | false | false | false | false | false | let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x)) =
| synth_dsum_case_injective s x;
(parse_dsum_type_of_tag s f g x) `parse_synth` (synth_dsum_case s x) | false |
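For orientation: parse_dsum_cases s f g x parses the payload selected by the tag x (through the known-tag parser table f, or the default parser g for unknown tags) and rebuilds the tagged value with synth_dsum_case s x; the call to synth_dsum_case_injective discharges the injectivity side condition that parse_synth requires. A minimal sketch of how it is consumed follows; it mirrors parse_dsum, defined further down in LowParse.Spec.Sum.fst, and the name parse_dsum_sketch is introduced only for this note.

(* Sketch: read the (maybe-enum) tag with p, then dispatch to the per-tag
   payload parser built by parse_dsum_cases. *)
let parse_dsum_sketch
  (#kt: parser_kind)
  (t: dsum)
  (p: parser kt (dsum_repr_type t))          (* tag parser *)
  (f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
  (#k: parser_kind)
  (g: parser k (dsum_type_of_unknown_tag t)) (* default-case payload parser *)
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)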
CSL.Semantics.fst | CSL.Semantics.equals_ext | val equals_ext : equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 59,
"end_line": 70,
"start_col": 0,
"start_line": 69
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.logical"
] | [] | false | false | false | true | true | let equals_ext #a (equals: (a -> a -> prop)) (f: (a -> a -> a)) =
| forall x1 x2 y. x1 `equals` x2 ==> (f x1 y) `equals` (f x2 y) | false |
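As a quick sanity check (illustrative only; equals_ext_for_prop_eq is a name introduced for this note, not part of the module), the extensionality condition above holds trivially when the equivalence is propositional equality, for any operation f.

(* Sketch: every binary operation is left-congruent w.r.t. propositional equality. *)
let equals_ext_for_prop_eq (#a: Type) (f: a -> a -> a)
  : Lemma (equals_ext #a (fun x y -> x == y) f)
  = ()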
|
CSL.Semantics.fst | CSL.Semantics.transitive | val transitive : equals: (_: a -> _: a -> Prims.prop) -> Prims.logical | let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 61,
"end_line": 54,
"start_col": 0,
"start_line": 53
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quanitifers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | equals: (_: a -> _: a -> Prims.prop) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.l_and",
"Prims.logical"
] | [] | false | false | false | true | true | let transitive #a (equals: (a -> a -> prop)) =
| forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z | false |
|
CSL.Semantics.fst | CSL.Semantics.interp_extensionality | val interp_extensionality : equals: (_: r -> _: r -> Prims.prop) -> f: (_: r -> _: s -> Prims.prop) -> Prims.logical | let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 74,
"end_line": 92,
"start_col": 0,
"start_line": 91
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quanitifers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
//////////////////////////////////////////////////////////////////////////////// | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | equals: (_: r -> _: r -> Prims.prop) -> f: (_: r -> _: s -> Prims.prop) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.l_and",
"Prims.logical"
] | [] | false | false | false | true | true | let interp_extensionality #r #s (equals: (r -> r -> prop)) (f: (r -> s -> prop)) =
| forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h | false |
|
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case_recip | val synth_dsum_case_recip (s: dsum)
: Tot (x: dsum_key s -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x)) | val synth_dsum_case_recip (s: dsum)
: Tot (x: dsum_key s -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x)) | let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 76,
"end_line": 572,
"start_col": 0,
"start_line": 569
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
x: LowParse.Spec.Sum.dsum_key s ->
_: LowParse.Spec.Base.refine_with_tag (LowParse.Spec.Sum.dsum_tag_of_data s) x
-> LowParse.Spec.Sum.dsum_type_of_tag s x | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"LowParse.Spec.Sum.dsum_type_of_tag"
] | [] | false | false | false | false | false | let synth_dsum_case_recip (s: dsum)
: Tot (x: dsum_key s -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x)) =
| match s with | DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip | false |
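synth_dsum_case_recip projects the payload back out of a tagged value; it is the inverse of synth_dsum_case, and the DSum constructor carries the two round-trip proofs that the lemmas synth_dsum_case_injective and synth_dsum_case_inverse extract. A minimal sketch of the recip-after-synth direction follows, using only the projector from this file; the name recip_synth_round_trip is introduced for this note.

(* Sketch: per-tag round trip carried by the DSum record. *)
let recip_synth_round_trip (s: dsum) (x: dsum_key s) (y: dsum_type_of_tag s x)
  : squash (synth_dsum_case_recip s x (synth_dsum_case s x y) == y)
  = DSum?.synth_case_recip_synth_case s x y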
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case_inverse | val synth_dsum_case_inverse (s: dsum) (x: dsum_key s)
: Lemma (synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x)) | val synth_dsum_case_inverse (s: dsum) (x: dsum_key s)
: Lemma (synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x)) | let synth_dsum_case_inverse
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x))
= let f
(y: refine_with_tag (dsum_tag_of_data s) (x))
: Lemma
(synth_dsum_case s x (synth_dsum_case_recip s x y) == y)
= DSum?.synth_case_synth_case_recip s y
in
Classical.forall_intro f | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 26,
"end_line": 851,
"start_col": 0,
"start_line": 840
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
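(* Editor's note -- descriptive comment added for exposition, not part of the
   original LowParse source: `parse_dsum'` and `parse_dsum` below parse a sum
   with a default case by first running `p` on the tag representation, mapping
   the result to a `maybe_enum_key` via `parse_maybe_enum_key`, and then
   dispatching with `parse_tagged_union`: known tags use the per-tag parser
   obtained from `f`, while unknown tags fall back to the default parser `g`. *)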
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k)
let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)
let parse_dsum_eq''
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq #(kt) #(dsum_key t) (parse_maybe_enum_key p (dsum_enum t)) #(dsum_type t) (dsum_tag_of_data t) (parse_dsum_cases t f g) input;
parse_synth_eq p (maybe_enum_key_of_repr (dsum_enum t)) input
let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t)) (dsum_tag_of_data t) (parse_dsum_cases t f g) (parse_maybe_enum_key p (dsum_enum t)) (fun input -> ()) (parse_dsum_cases_kind t f g) (parse_dsum_cases' t f g) (fun tg input -> parse_dsum_cases_eq' t f g tg input) input
let parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq_ t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let parse_dsum_eq
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match k with
| Known k' ->
begin match parse (dsnd (f k')) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
| Unknown k' ->
begin match parse g input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
end
))
= parse_dsum_eq_ t p f g input;
let j = parse (parse_maybe_enum_key p (dsum_enum t)) input in
match j with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_dsum_case_injective t k;
begin match k with
| Known k_ ->
parse_synth_eq (dsnd (f k_)) (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') (dsnd (f k_))) (synth_dsum_case t k) input_k
| Unknown k_ ->
parse_synth_eq g (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') g) (synth_dsum_case t k) input_k
end
let parse_dsum_eq3
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (r, consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) r in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_type_of_tag' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: LowParse.Spec.Sum.dsum -> x: LowParse.Spec.Sum.dsum_key s
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Combinators.synth_inverse (LowParse.Spec.Sum.synth_dsum_case s x)
(LowParse.Spec.Sum.synth_dsum_case_recip s x)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_key",
"FStar.Classical.forall_intro",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"Prims.eq2",
"LowParse.Spec.Sum.synth_dsum_case",
"LowParse.Spec.Sum.synth_dsum_case_recip",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Sum.__proj__DSum__item__synth_case_synth_case_recip",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Sum.dsum_type_of_tag"
] | [] | false | false | true | false | false | let synth_dsum_case_inverse (s: dsum) (x: dsum_key s)
: Lemma (synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x)) =
| let f (y: refine_with_tag (dsum_tag_of_data s) (x))
: Lemma (synth_dsum_case s x (synth_dsum_case_recip s x y) == y) =
DSum?.synth_case_synth_case_recip s y
in
Classical.forall_intro f | false |
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.serialize_dsum_cases | val serialize_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(sr: (x: dsum_known_key s -> Tot (serializer (dsnd (f x)))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_cases s f g x)) | val serialize_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(sr: (x: dsum_known_key s -> Tot (serializer (dsnd (f x)))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_cases s f g x)) | let serialize_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_cases s f g x))
= synth_dsum_case_injective s x;
synth_dsum_case_inverse s x;
serialize_synth
_
(synth_dsum_case s x)
(serialize_dsum_type_of_tag s f sr g sg x)
(synth_dsum_case_recip s x)
() | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 884,
"start_col": 0,
"start_line": 868
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
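(* Editor's note -- descriptive comment added for exposition, not part of the
   original LowParse source: a `dsum` is a tagged union whose tag is a
   `maybe_enum_key`: each known enum key carries its own payload type
   (`type_of_known_tag`), while all unknown representations share a single
   default payload type (`type_of_unknown_tag`), as `dsum_type_of_tag'` below
   makes precise. *)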
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k)
let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)
let parse_dsum_eq''
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq #(kt) #(dsum_key t) (parse_maybe_enum_key p (dsum_enum t)) #(dsum_type t) (dsum_tag_of_data t) (parse_dsum_cases t f g) input;
parse_synth_eq p (maybe_enum_key_of_repr (dsum_enum t)) input
let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t)) (dsum_tag_of_data t) (parse_dsum_cases t f g) (parse_maybe_enum_key p (dsum_enum t)) (fun input -> ()) (parse_dsum_cases_kind t f g) (parse_dsum_cases' t f g) (fun tg input -> parse_dsum_cases_eq' t f g tg input) input
let parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq_ t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let parse_dsum_eq
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match k with
| Known k' ->
begin match parse (dsnd (f k')) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
| Unknown k' ->
begin match parse g input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
end
))
= parse_dsum_eq_ t p f g input;
let j = parse (parse_maybe_enum_key p (dsum_enum t)) input in
match j with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_dsum_case_injective t k;
begin match k with
| Known k_ ->
parse_synth_eq (dsnd (f k_)) (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') (dsnd (f k_))) (synth_dsum_case t k) input_k
| Unknown k_ ->
parse_synth_eq g (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') g) (synth_dsum_case t k) input_k
end
let parse_dsum_eq3
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (r, consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) r in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_type_of_tag' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let synth_dsum_case_inverse
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x))
= let f
(y: refine_with_tag (dsum_tag_of_data s) (x))
: Lemma
(synth_dsum_case s x (synth_dsum_case_recip s x y) == y)
= DSum?.synth_case_synth_case_recip s y
in
Classical.forall_intro f
let serialize_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_type_of_tag s f g x))
= match x with
| Known x' ->
serialize_ext (dsnd (f x')) (sr x') (parse_dsum_type_of_tag s f g x)
| Unknown x' ->
serialize_ext g sg (parse_dsum_type_of_tag s f g x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
sr:
(x: LowParse.Spec.Sum.dsum_known_key s
-> LowParse.Spec.Base.serializer (FStar.Pervasives.dsnd (f x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
sg: LowParse.Spec.Base.serializer g ->
x: LowParse.Spec.Sum.dsum_key s
-> LowParse.Spec.Base.serializer (LowParse.Spec.Sum.parse_dsum_cases s f g x) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Base.serializer",
"Prims.__proj__Mkdtuple2__item___1",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Spec.Combinators.serialize_synth",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind",
"LowParse.Spec.Sum.dsum_type_of_tag",
"LowParse.Spec.Sum.dsum_cases",
"LowParse.Spec.Sum.parse_dsum_type_of_tag",
"LowParse.Spec.Sum.synth_dsum_case",
"LowParse.Spec.Sum.serialize_dsum_type_of_tag",
"LowParse.Spec.Sum.synth_dsum_case_recip",
"Prims.unit",
"LowParse.Spec.Sum.synth_dsum_case_inverse",
"LowParse.Spec.Sum.synth_dsum_case_injective",
"LowParse.Spec.Sum.parse_dsum_cases"
] | [] | false | false | false | false | false | let serialize_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(sr: (x: dsum_known_key s -> Tot (serializer (dsnd (f x)))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_cases s f g x)) =
| synth_dsum_case_injective s x;
synth_dsum_case_inverse s x;
serialize_synth _
(synth_dsum_case s x)
(serialize_dsum_type_of_tag s f sr g sg x)
(synth_dsum_case_recip s x)
() | false |
CSL.Semantics.fst | CSL.Semantics.is_unit | val is_unit : x: a -> equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 20,
"end_line": 67,
"start_col": 0,
"start_line": 64
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
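(* Editor's note -- illustrative sketch, not part of the original Steel
   source: a familiar instance of these quantified laws is integer addition
   under propositional equality, for which the unit law of `is_unit` below
   reads `forall y. 0 + y == y /\ y + 0 == y`. The hypothetical check below
   is expected to be dischargeable directly by the SMT solver. *)
let editor_example_unit_of_int_add : squash (forall (y:int). 0 + y == y /\ y + 0 == y) = ()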
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: a -> equals: (_: a -> _: a -> Prims.prop) -> f: (_: a -> _: a -> a) -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"Prims.prop",
"Prims.l_Forall",
"Prims.l_and",
"Prims.logical"
] | [] | false | false | false | true | true | let is_unit #a (x: a) (equals: (a -> a -> prop)) (f: (a -> a -> a)) =
| forall y. {:pattern f x y\/f y x} (f x y) `equals` y /\ (f y x) `equals` y | false |
|
CSL.Semantics.fst | CSL.Semantics.action_t | val action_t : pre: Mkst0?.hprop st -> post: CSL.Semantics.post_t st a -> Type0 | let action_t
(#st:st)
(#a:Type)
(pre:st.hprop)
(post:post_t st a)
= unit ->
Mst a
(requires fun m0 -> st.interp (pre `st.star` st.invariant m0) m0)
(ensures fun m0 x m1 ->
st.interp ((post x) `st.star` st.invariant m1) m1 /\
preserves_frame pre (post x) m0 m1) | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 39,
"end_line": 154,
"start_col": 0,
"start_line": 144
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st
let st = s:st0 { st_laws s }
(**** End state defn ****)
(**** Begin expects, provides defns ****)
/// expects (the heap assertion expected by a computation) is simply an st.hprop
///
/// provides, or the post heap assertion, is a st.hprop on [a]-typed result
type post_t (st:st) (a:Type) = a -> st.hprop
(**** End expects, provides defns ****)
effect Mst (a:Type) (#st:st) (req:st.mem -> Type0) (ens:st.mem -> a -> st.mem -> Type0) =
NMSTATE a st.mem st.evolves req ens
(**** Begin interface of actions ****)
/// Actions are essentially state transformers that preserve frames
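(* Editor's note -- descriptive comment added for exposition, not part of the
   original Steel source: `preserves_frame pre post m0 m1` below is the
   semantic form of the separation-logic frame rule: every assertion `frame`
   that held together with `pre` and the state invariant in the initial memory
   `m0` must still hold together with `post` in the final memory `m1`. The
   `action_t` interface then asks each action both to establish its
   postcondition and to preserve all such frames. *)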
let preserves_frame (#st:st) (pre post:st.hprop) (m0 m1:st.mem) =
forall (frame:st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | pre: Mkst0?.hprop st -> post: CSL.Semantics.post_t st a -> Type0 | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"CSL.Semantics.post_t",
"Prims.unit",
"CSL.Semantics.__proj__Mkst0__item__mem",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.__proj__Mkst0__item__star",
"CSL.Semantics.__proj__Mkst0__item__invariant",
"Prims.l_and",
"CSL.Semantics.preserves_frame"
] | [] | false | false | false | false | true | let action_t (#st: st) (#a: Type) (pre: st.hprop) (post: post_t st a) =
| unit
-> Mst a
(requires fun m0 -> st.interp (pre `st.star` (st.invariant m0)) m0)
(ensures
fun m0 x m1 ->
st.interp ((post x) `st.star` (st.invariant m1)) m1 /\ preserves_frame pre (post x) m0 m1) | false |
|
CSL.Semantics.fst | CSL.Semantics.st_laws | val st_laws : st: CSL.Semantics.st0 -> Prims.logical | let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 11,
"end_line": 111,
"start_col": 0,
"start_line": 100
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
//////////////////////////////////////////////////////////////////////////////// | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | st: CSL.Semantics.st0 -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st0",
"Prims.l_and",
"CSL.Semantics.symmetry",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"CSL.Semantics.__proj__Mkst0__item__equals",
"CSL.Semantics.transitive",
"CSL.Semantics.interp_extensionality",
"CSL.Semantics.__proj__Mkst0__item__mem",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.associative",
"CSL.Semantics.__proj__Mkst0__item__star",
"CSL.Semantics.commutative",
"CSL.Semantics.is_unit",
"CSL.Semantics.__proj__Mkst0__item__emp",
"CSL.Semantics.equals_ext",
"CSL.Semantics.affine",
"Prims.logical"
] | [] | false | false | false | true | true | let st_laws (st: st0) =
| symmetry st.equals /\ transitive st.equals /\ interp_extensionality st.equals st.interp /\
associative st.equals st.star /\ commutative st.equals st.star /\ is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\ affine st | false |
|
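Editorial aside (illustrative only, not a dataset row): the st_laws record above packages all of the commutative-monoid laws for `star` and `equals` into one conjunction. The sketch below shows how a client lemma could recover a single conjunct from it; it assumes a module that opens CSL.Semantics, the name star_comm_from_laws is hypothetical, and the proof may need a small z3rlimit bump depending on solver settings.
let star_comm_from_laws (st:st0) (p q:st.hprop)
  : Lemma (requires st_laws st)
          (ensures (p `st.star` q) `st.equals` (q `st.star` p))
  = () // the `commutative st.equals st.star` conjunct fires via its {:pattern f x y} trigger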
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.synth_dsum_case_recip_synth_case_unknown_post | val synth_dsum_case_recip_synth_case_unknown_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case:
(x: maybe_enum_key e -> y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x
-> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip:
(k: maybe_enum_key e -> (refine_with_tag tag_of_data k)
-> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(x: repr)
: GTot Type0 | val synth_dsum_case_recip_synth_case_unknown_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case:
(x: maybe_enum_key e -> y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x
-> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip:
(k: maybe_enum_key e -> (refine_with_tag tag_of_data k)
-> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(x: repr)
: GTot Type0 | let synth_dsum_case_recip_synth_case_unknown_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(x: repr)
: GTot Type0
=
list_mem x (list_map snd e) == false ==> (
forall (y: type_of_unknown_tag) . {:pattern (synth_case_recip (Unknown x) (synth_case (Unknown x) y))}
synth_case_recip (Unknown x) (synth_case (Unknown x) y) == y
) | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 955,
"start_col": 0,
"start_line": 940
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k)
let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)
let parse_dsum_eq''
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq #(kt) #(dsum_key t) (parse_maybe_enum_key p (dsum_enum t)) #(dsum_type t) (dsum_tag_of_data t) (parse_dsum_cases t f g) input;
parse_synth_eq p (maybe_enum_key_of_repr (dsum_enum t)) input
let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t)) (dsum_tag_of_data t) (parse_dsum_cases t f g) (parse_maybe_enum_key p (dsum_enum t)) (fun input -> ()) (parse_dsum_cases_kind t f g) (parse_dsum_cases' t f g) (fun tg input -> parse_dsum_cases_eq' t f g tg input) input
let parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq_ t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let parse_dsum_eq
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match k with
| Known k' ->
begin match parse (dsnd (f k')) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
| Unknown k' ->
begin match parse g input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
end
))
= parse_dsum_eq_ t p f g input;
let j = parse (parse_maybe_enum_key p (dsum_enum t)) input in
match j with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_dsum_case_injective t k;
begin match k with
| Known k_ ->
parse_synth_eq (dsnd (f k_)) (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') (dsnd (f k_))) (synth_dsum_case t k) input_k
| Unknown k_ ->
parse_synth_eq g (synth_dsum_case t k) input_k;
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind t f k') g) (synth_dsum_case t k) input_k
end
let parse_dsum_eq3
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (r, consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) r in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_type_of_tag' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_dsum_case t k x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input
let synth_dsum_case_inverse
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_inverse (synth_dsum_case s x) (synth_dsum_case_recip s x))
= let f
(y: refine_with_tag (dsum_tag_of_data s) (x))
: Lemma
(synth_dsum_case s x (synth_dsum_case_recip s x y) == y)
= DSum?.synth_case_synth_case_recip s y
in
Classical.forall_intro f
let serialize_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_type_of_tag s f g x))
= match x with
| Known x' ->
serialize_ext (dsnd (f x')) (sr x') (parse_dsum_type_of_tag s f g x)
| Unknown x' ->
serialize_ext g sg (parse_dsum_type_of_tag s f g x)
let serialize_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
(x: dsum_key s)
: Tot (serializer (parse_dsum_cases s f g x))
= synth_dsum_case_injective s x;
synth_dsum_case_inverse s x;
serialize_synth
_
(synth_dsum_case s x)
(serialize_dsum_type_of_tag s f sr g sg x)
(synth_dsum_case_recip s x)
()
let serialize_dsum'
(#kt: parser_kind)
(t: dsum)
(#p: parser kt (dsum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
(sc: ((x: dsum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_dsum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(kt)
#(dsum_key t)
#(parse_maybe_enum_key p (dsum_enum t))
(serialize_maybe_enum_key p s (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
#pc
sc
let serialize_dsum
(#kt: parser_kind)
(s: dsum)
(#pt: parser kt (dsum_repr_type s))
(st: serializer pt)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(sr: (x: dsum_known_key s) -> Tot (serializer (dsnd (f x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(sg: serializer g)
: Pure (serializer (parse_dsum s pt f g))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_dsum' s st #_ #(parse_dsum_cases s f g) (serialize_dsum_cases s f sr g sg)
let synth_dsum_case_recip_synth_case_known_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_known_tag x) . {:pattern (synth_case_recip (Known x) (synth_case (Known x) y))}
synth_case_recip (Known x) (synth_case (Known x) y) == y
) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: LowParse.Spec.Enum.enum key repr ->
tag_of_data: (_: data -> LowParse.Spec.Enum.maybe_enum_key e) ->
type_of_known_tag: (_: LowParse.Spec.Enum.enum_key e -> Type) ->
type_of_unknown_tag: Type ->
synth_case:
(
x: LowParse.Spec.Enum.maybe_enum_key e ->
y: LowParse.Spec.Sum.dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x
-> LowParse.Spec.Base.refine_with_tag tag_of_data x) ->
synth_case_recip:
(k: LowParse.Spec.Enum.maybe_enum_key e -> _: LowParse.Spec.Base.refine_with_tag tag_of_data k
-> LowParse.Spec.Sum.dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k) ->
x: repr
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.eqtype",
"LowParse.Spec.Enum.enum",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_type_of_tag'",
"LowParse.Spec.Base.refine_with_tag",
"Prims.l_imp",
"Prims.eq2",
"Prims.bool",
"LowParse.Spec.Enum.list_mem",
"LowParse.Spec.Enum.list_map",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.snd",
"Prims.l_Forall",
"LowParse.Spec.Enum.Unknown"
] | [] | false | false | false | false | true | let synth_dsum_case_recip_synth_case_unknown_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case:
(x: maybe_enum_key e -> y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x
-> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip:
(k: maybe_enum_key e -> (refine_with_tag tag_of_data k)
-> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(x: repr)
: GTot Type0 =
| list_mem x (list_map snd e) == false ==>
(forall (y: type_of_unknown_tag).
{:pattern (synth_case_recip (Unknown x) (synth_case (Unknown x) y))}
synth_case_recip (Unknown x) (synth_case (Unknown x) y) == y) | false |
CSL.Semantics.fst | CSL.Semantics.affine | val affine : st: CSL.Semantics.st0 -> Prims.logical | let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 52,
"end_line": 96,
"start_col": 0,
"start_line": 94
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | st: CSL.Semantics.st0 -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st0",
"Prims.l_Forall",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"CSL.Semantics.__proj__Mkst0__item__mem",
"Prims.l_imp",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.__proj__Mkst0__item__star",
"Prims.logical"
] | [] | false | false | false | true | true | let affine (st: st0) =
| forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s)}
st.interp (r0 `st.star` r1) s ==> st.interp r0 s | false |
|
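Editorial aside (illustrative only, not a dataset row): the affine record above says a framed assertion can always be weakened by dropping the frame. A minimal sketch of using it, again assuming a module that opens CSL.Semantics; drop_frame is a hypothetical name.
let drop_frame (st:st0) (r0 r1:st.hprop) (m:st.mem)
  : Lemma (requires affine st /\ st.interp (r0 `st.star` r1) m)
          (ensures st.interp r0 m)
  = () // the {:pattern (st.interp (r0 `st.star` r1) s)} trigger in `affine` matches the hypothesis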
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_eq_ | val parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input ==
(match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x))) | val parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input ==
(match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x))) | let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t)) (dsum_tag_of_data t) (parse_dsum_cases t f g) (parse_maybe_enum_key p (dsum_enum t)) (fun input -> ()) (parse_dsum_cases_kind t f g) (parse_dsum_cases' t f g) (fun tg input -> parse_dsum_cases_eq' t f g tg input) input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 285,
"end_line": 752,
"start_col": 0,
"start_line": 734
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
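(* Usage shape (hypothetical instance, for illustration only): given an enum such as
   [("A", 0uy); ("B", 1uy)], a payload type per tag via [type_of_tag], and the coercions
   [synth_case] / [synth_case_recip] together with their squash inverse proofs,
   [make_sum'] packages everything into a [sum]; [parse_sum] then parses the enum key
   and dispatches to the per-tag payload parser supplied through [pc]. *)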
(* Sum with default case *)
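(* A [dsum] pairs an enum of known tags with a default case: a known key [Known k]
   carries a payload of [type_of_known_tag k], while any representation outside the
   enum ([Unknown r]) carries a payload of the single [type_of_unknown_tag]. The
   parsers below dispatch on [maybe_enum_key] accordingly. *)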
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k)
let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)
let parse_dsum_eq''
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq #(kt) #(dsum_key t) (parse_maybe_enum_key p (dsum_enum t)) #(dsum_type t) (dsum_tag_of_data t) (parse_dsum_cases t f g) input;
parse_synth_eq p (maybe_enum_key_of_repr (dsum_enum t)) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.dsum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.dsum_repr_type t) ->
f:
(x: LowParse.Spec.Sum.dsum_known_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag t x))) ->
g: LowParse.Spec.Base.parser k' (LowParse.Spec.Sum.dsum_type_of_unknown_tag t) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_dsum t p f g) input ==
(match
LowParse.Spec.Base.parse (LowParse.Spec.Enum.parse_maybe_enum_key p
(LowParse.Spec.Sum.dsum_enum t))
input
with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ k consumed_k) ->
let input_k = FStar.Seq.Base.slice input consumed_k (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_dsum_cases' t f g k) input_k with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some (x, consumed_k + consumed_x))
<:
FStar.Pervasives.Native.option (LowParse.Spec.Sum.dsum_type t *
LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_tagged_union_eq_gen",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Enum.parse_maybe_enum_key",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind",
"LowParse.Spec.Sum.parse_dsum_cases",
"Prims.unit",
"LowParse.Spec.Sum.parse_dsum_cases_kind",
"LowParse.Spec.Sum.parse_dsum_cases'",
"LowParse.Spec.Sum.parse_dsum_cases_eq'",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_dsum",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Sum.dsum_cases",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input ==
(match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x))) =
| parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t))
(dsum_tag_of_data t)
(parse_dsum_cases t f g)
(parse_maybe_enum_key p (dsum_enum t))
(fun input -> ())
(parse_dsum_cases_kind t f g)
(parse_dsum_cases' t f g)
(fun tg input -> parse_dsum_cases_eq' t f g tg input)
input | false |
CSL.Semantics.fst | CSL.Semantics.st | val st : Type | let st = s:st0 { st_laws s } | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 28,
"end_line": 113,
"start_col": 0,
"start_line": 113
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
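/// A state carrier [st0] bundles: a memory type [mem] with an evolution preorder
/// [evolves]; a type of separation-logic assertions [hprop] with [emp], [star] and
/// an equivalence [equals]; an interpretation [interp] of assertions over memories;
/// and an [invariant] assertion computed from each memory.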
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st0",
"CSL.Semantics.st_laws"
] | [] | false | false | false | true | true | let st =
| s: st0{st_laws s} | false |
|
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_cases_eq' | val parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma (parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input) | val parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma (parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input) | let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 59,
"end_line": 675,
"start_col": 0,
"start_line": 659
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
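(* A [dsum] pairs an enum of known tags with a default case: a known key [Known k]
   carries a payload of [type_of_known_tag k], while any representation outside the
   enum ([Unknown r]) carries a payload of the single [type_of_unknown_tag]. The
   parsers below dispatch on [maybe_enum_key] accordingly. *)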
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to work around a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s: LowParse.Spec.Sum.dsum ->
f:
(x: LowParse.Spec.Sum.dsum_known_key s
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag s x))) ->
g: LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_unknown_tag s) ->
x: LowParse.Spec.Sum.dsum_key s ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_dsum_cases s f g x) input ==
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_dsum_cases' s f g x) input) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Spec.Sum.dsum_key",
"LowParse.Bytes.bytes",
"LowParse.Spec.Enum.enum_key",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_enum",
"LowParse.Spec.Combinators.parse_synth_eq",
"Prims.__proj__Mkdtuple2__item___1",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Sum.dsum_tag_of_data",
"LowParse.Spec.Enum.Known",
"FStar.Pervasives.dsnd",
"LowParse.Spec.Sum.synth_dsum_case",
"Prims.unit",
"LowParse.Spec.Sum.weaken_parse_dsum_cases_kind",
"LowParse.Spec.Base.weaken",
"LowParse.Spec.Enum.unknown_enum_repr",
"LowParse.Spec.Enum.Unknown",
"LowParse.Spec.Sum.synth_dsum_case_injective",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Sum.dsum_cases",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_dsum_cases",
"LowParse.Spec.Sum.parse_dsum_cases'",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x))))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma (parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input) =
| synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
(synth_dsum_case s x)
input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input | false |
CSL.Semantics.fst | CSL.Semantics.weaker_pre | val weaker_pre : pre: Mkst0?.hprop st -> next_pre: Mkst0?.hprop st -> Prims.logical | let weaker_pre (#st:st) (pre:st.hprop) (next_pre:st.hprop) =
forall (h:st.mem) (frame:st.hprop).
st.interp (pre `st.star` frame) h ==>
st.interp (next_pre `st.star` frame) h | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 42,
"end_line": 164,
"start_col": 0,
"start_line": 161
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
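/// A state carrier [st0] bundles: a memory type [mem] with an evolution preorder
/// [evolves]; a type of separation-logic assertions [hprop] with [emp], [star] and
/// an equivalence [equals]; an interpretation [interp] of assertions over memories;
/// and an [invariant] assertion computed from each memory.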
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st
let st = s:st0 { st_laws s }
(**** End state defn ****)
(**** Begin expects, provides defns ****)
/// expects (the heap assertion expected by a computation) is simply an st.hprop
///
/// provides, or the post heap assertion, is a st.hprop on [a]-typed result
type post_t (st:st) (a:Type) = a -> st.hprop
(**** End expects, provides defns ****)
effect Mst (a:Type) (#st:st) (req:st.mem -> Type0) (ens:st.mem -> a -> st.mem -> Type0) =
NMSTATE a st.mem st.evolves req ens
(**** Begin interface of actions ****)
/// Actions are essentially state transformers that preserve frames
let preserves_frame (#st:st) (pre post:st.hprop) (m0 m1:st.mem) =
forall (frame:st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1
let action_t
(#st:st)
(#a:Type)
(pre:st.hprop)
(post:post_t st a)
= unit ->
Mst a
(requires fun m0 -> st.interp (pre `st.star` st.invariant m0) m0)
(ensures fun m0 x m1 ->
st.interp ((post x) `st.star` st.invariant m1) m1 /\
preserves_frame pre (post x) m0 m1)
(**** End interface of actions ****)
(**** Begin definition of the computation AST ****) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | pre: Mkst0?.hprop st -> next_pre: Mkst0?.hprop st -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"Prims.l_Forall",
"CSL.Semantics.__proj__Mkst0__item__mem",
"Prims.l_imp",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.__proj__Mkst0__item__star",
"Prims.logical"
] | [] | false | false | false | false | true | let weaker_pre (#st: st) (pre next_pre: st.hprop) =
| forall (h: st.mem) (frame: st.hprop).
st.interp (pre `st.star` frame) h ==> st.interp (next_pre `st.star` frame) h | false |
|
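The record above pins down weaker_pre, the precondition-weakening relation used by CSL.Semantics. As a quick sanity check of what the definition says, the relation is reflexive; the sketch below is illustrative (the name weaker_pre_refl is not part of the module) and assumes the #push-options recorded in the file_context:

(* Hypothetical helper: the quantified implication instantiates to the same
   st.interp fact on both sides, so SMT discharges it with no extra hints. *)
let weaker_pre_refl (#st:st) (pre:st.hprop)
  : Lemma (weaker_pre pre pre)
  = ()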
LowParse.Spec.Sum.fst | LowParse.Spec.Sum.parse_dsum_eq' | val parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input ==
(match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x))) | val parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input ==
(match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x))) | let parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_dsum_eq_ t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input | {
"file_name": "src/lowparse/LowParse.Spec.Sum.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 47,
"end_line": 774,
"start_col": 0,
"start_line": 754
} | module LowParse.Spec.Sum
include LowParse.Spec.Enum
module Seq = FStar.Seq
let synth_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: data)
: GTot (type_of_tag (tag_of_data x))
= synth_case_recip (tag_of_data x) x
noeq
type sum =
| Sum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (enum_key e))) ->
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
sum
inline_for_extraction
let sum_key_type (t: sum) : Tot eqtype =
match t with (Sum key _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let sum_repr_type (t: sum) : Tot eqtype =
match t with (Sum _ repr _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let sum_enum (t: sum) : Tot (enum (sum_key_type t) (sum_repr_type t)) =
match t with (Sum _ _ e _ _ _ _ _ _ _) -> e
inline_for_extraction
let sum_key (t: sum) : Tot Type =
enum_key (sum_enum t)
inline_for_extraction
let sum_key_type_of_sum_key (t: sum) (k: sum_key t) : Pure (sum_key_type t)
(requires True)
(ensures (fun k' -> k' == (k <: sum_key_type t)))
= k
inline_for_extraction
let sum_type (t: sum) : Tot Type =
match t with
| Sum _ _ _ data _ _ _ _ _ _ -> data
inline_for_extraction
let sum_tag_of_data (t: sum) : Tot ((x: sum_type t) -> Tot (sum_key t)) =
match t with
| Sum _ _ _ _ tag_of_data _ _ _ _ _ -> tag_of_data
inline_for_extraction
let sum_cases (t: sum) (x: sum_key t) : Type =
refine_with_tag #(sum_key t) #(sum_type t) (sum_tag_of_data t) x
inline_for_extraction
let sum_type_of_tag (t: sum) : (x: sum_key t) -> Type =
match t with
| Sum _ _ _ _ _ type_of_tag _ _ _ _ -> type_of_tag
let weaken_parse_cases_kind
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
: Tot parser_kind
= let keys : list (sum_key_type s) = List.Tot.map fst (sum_enum s) in
glb_list_of #(sum_key_type s) (fun (x: sum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else default_parser_kind
) (List.Tot.map fst (sum_enum s))
inline_for_extraction
let synth_sum_case (s: sum) : (k: sum_key s) -> (x: sum_type_of_tag s k) -> Tot (sum_cases s k) =
match s with
| Sum _ _ _ _ _ _ synth_case _ _ _ -> synth_case
let synth_sum_case_injective (s: sum) (k: sum_key s) : Lemma
(synth_injective (synth_sum_case s k))
= Classical.forall_intro (Sum?.synth_case_recip_synth_case s k)
let parse_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (weaken_parse_cases_kind s f) (sum_cases s x))
= synth_sum_case_injective s x;
weaken (weaken_parse_cases_kind s f) (dsnd (f x)) `parse_synth` (synth_sum_case s x)
let parse_sum_cases_eq
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == (match parse (dsnd (f x)) input with
| None -> None
| Some (y, consumed) -> Some (synth_sum_case s x y, consumed)
))
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input
let parse_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
: Tot (parser (dfst (f x)) (sum_cases s x))
=
synth_sum_case_injective s x;
dsnd (f x) `parse_synth` synth_sum_case s x
let parse_sum_cases_eq'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(x: sum_key s)
(input: bytes)
: Lemma
(parse (parse_sum_cases s f x) input == parse (parse_sum_cases' s f x) input)
= synth_sum_case_injective s x;
parse_synth_eq (weaken (weaken_parse_cases_kind s f) (dsnd (f x))) (synth_sum_case s x) input;
parse_synth_eq (dsnd (f x)) (synth_sum_case s x) input
let parse_sum'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(#k: parser_kind)
(pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
: Tot (parser (and_then_kind (parse_filter_kind kt) k) (sum_type t))
= parse_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_sum_kind
(kt: parser_kind)
(t: sum)
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot parser_kind
= and_then_kind (parse_filter_kind kt) (weaken_parse_cases_kind t pc)
let parse_sum
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
: Tot (parser (parse_sum_kind kt t pc) (sum_type t))
= parse_sum' t p (parse_sum_cases t pc)
let parse_sum_eq'
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match
// parse (synth_sum_case_injective t k; parse_synth (dsnd (pc k)) (synth_sum_case t k)) input_k
parse (parse_sum_cases' t pc k) input_k
with
| None -> None
| Some (x, consumed_x) -> Some ((x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen
#(parse_filter_kind kt)
#(sum_key t)
(parse_enum_key p (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
(parse_sum_cases t pc)
(parse_enum_key p (sum_enum t))
(fun input -> ())
(fun k -> dfst (pc k))
(parse_sum_cases' t pc)
(fun k input -> parse_sum_cases_eq' t pc k input)
input
let parse_sum_eq
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse (parse_enum_key p (sum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
))
= parse_sum_eq' t p pc input;
match parse (parse_enum_key p (sum_enum t)) input with
| None -> ()
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
synth_sum_case_injective t k;
parse_synth_eq (dsnd (pc k)) (synth_sum_case t k) input_k
let parse_sum_eq''
(#kt: parser_kind)
(t: sum)
(p: parser kt (sum_repr_type t))
(pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(input: bytes)
: Lemma
(parse (parse_sum t p pc) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
let k = maybe_enum_key_of_repr (sum_enum t) k' in
begin match k with
| Known k ->
begin match parse (dsnd (pc k)) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((synth_sum_case t k x <: sum_type t), consumed_k + consumed_x)
end
| _ -> None
end
))
= parse_sum_eq t p pc input;
parse_enum_key_eq p (sum_enum t) input
inline_for_extraction
let synth_sum_case_recip (s: sum) (k: sum_key s) (x: sum_cases s k) : Tot (sum_type_of_tag s k) =
match s with (Sum _ _ _ _ _ _ _ synth_case_recip _ _) ->
synth_case_recip k x
let synth_sum_case_inverse (s: sum) (k: sum_key s) : Lemma
(synth_inverse (synth_sum_case s k) (synth_sum_case_recip s k))
= Classical.forall_intro (Sum?.synth_case_synth_case_recip s)
let serialize_sum_cases'
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases' s f x))
= synth_sum_case_injective s x;
synth_sum_case_inverse s x;
(serialize_synth
_
(synth_sum_case s x)
(sr x)
(synth_sum_case_recip s x)
()
)
let serialize_sum_cases
(s: sum)
(f: (x: sum_key s) -> Tot (k: parser_kind & parser k (sum_type_of_tag s x)))
(sr: (x: sum_key s) -> Tot (serializer (dsnd (f x))))
(x: sum_key s)
: Tot (serializer (parse_sum_cases s f x))
= Classical.forall_intro (parse_sum_cases_eq' s f x);
serialize_ext
(parse_sum_cases' s f x)
(serialize_sum_cases' s f sr x)
(parse_sum_cases s f x)
let serialize_sum'
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#k: parser_kind)
(#pc: ((x: sum_key t) -> Tot (parser k (sum_cases t x))))
(sc: ((x: sum_key t) -> Tot (serializer (pc x))))
: Pure (serializer (parse_sum' t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_tagged_union
#(parse_filter_kind kt)
#(sum_key t)
#(parse_enum_key p (sum_enum t))
(serialize_enum_key p s (sum_enum t))
#(sum_type t)
(sum_tag_of_data t)
#k
#pc
sc
let serialize_sum
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
: Pure (serializer (parse_sum t p pc))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= // FIXME: WHY WHY WHY is implicit argument inference failing here? (i.e. introducing an eta-expansion)
serialize_sum' t s #_ #(parse_sum_cases t pc) (serialize_sum_cases t pc sc)
let serialize_sum_eq
(#kt: parser_kind)
(t: sum)
(#p: parser kt (sum_repr_type t))
(s: serializer p)
(#pc: ((x: sum_key t) -> Tot (k: parser_kind & parser k (sum_type_of_tag t x))))
(sc: ((x: sum_key t) -> Tot (serializer (dsnd (pc x)))))
(x: sum_type t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (
serialize (serialize_sum t s sc) x == (
let tg = sum_tag_of_data t x in
serialize (serialize_enum_key _ s (sum_enum t)) tg `Seq.append`
serialize (sc tg) (synth_sum_case_recip t tg x)
)))
= let tg = sum_tag_of_data t x in
synth_sum_case_injective t tg;
synth_sum_case_inverse t tg;
serialize_synth_eq (dsnd (pc tg)) (synth_sum_case t tg) (sc tg) (synth_sum_case_recip t tg) () x
inline_for_extraction
let make_sum
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
: Tot (
(type_of_tag: (enum_key e -> Tot Type)) ->
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k))) ->
(synth_case_recip_synth_case: (
(x: enum_key e) ->
(y: type_of_tag x) ->
Lemma
(synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Lemma
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x)
)) ->
Tot sum)
= Sum key repr e data tag_of_data
let synth_case_recip_synth_case_post
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(x: key)
: GTot Type0
=
list_mem x (list_map fst e) ==> (
forall (y: type_of_tag x) . {:pattern (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y))}
synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y
)
inline_for_extraction
let make_sum'
(#key #repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> Tot (enum_key e)))
(type_of_tag: (enum_key e -> Tot Type))
(synth_case: ((x: enum_key e) -> (y: type_of_tag x) -> Tot (refine_with_tag tag_of_data x)))
(synth_case_recip: ((k: enum_key e) -> (x: refine_with_tag tag_of_data k) -> Tot (type_of_tag k)))
(synth_case_recip_synth_case: (
(x: key) ->
Tot (squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x))
))
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
))
: Tot sum
= make_sum e tag_of_data type_of_tag synth_case synth_case_recip (fun x y ->
let sq : squash (synth_case_recip_synth_case_post e tag_of_data type_of_tag synth_case synth_case_recip x) =
synth_case_recip_synth_case x in
assert (synth_case_recip' e tag_of_data type_of_tag synth_case_recip (synth_case x y) == y))
(fun x -> let _ = synth_case_synth_case_recip x in assert (synth_case (tag_of_data x) (synth_case_recip' e tag_of_data type_of_tag synth_case_recip x) == x))
(* Sum with default case *)
inline_for_extraction
let dsum_type_of_tag'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(k: maybe_enum_key e)
: Type
= match k with
| Unknown _ -> type_of_unknown_tag
| Known k -> type_of_known_tag k
let synth_dsum_case'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_known_case: ((x: enum_key e) -> (y: type_of_known_tag x) -> Tot (refine_with_tag tag_of_data (Known x))))
(synth_unknown_case: ((x: unknown_enum_repr e) -> type_of_unknown_tag -> Tot (refine_with_tag tag_of_data (Unknown x))))
(xy: (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x))
: GTot data
= let (| x, y |) = xy in
match x with
| Unknown x -> synth_unknown_case x y
| Known x -> synth_known_case x y
let synth_dsum_case_recip'
(#key: eqtype)
(#repr: eqtype)
(e: enum key repr)
(#data: Type)
(tag_of_data: (data -> GTot (maybe_enum_key e)))
(type_of_known_tag: (enum_key e -> Tot Type))
(type_of_unknown_tag: Type)
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k)))
(y: data)
: GTot (x: maybe_enum_key e & dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x)
= let tg = tag_of_data y in
(| tg, synth_case_recip tg y |)
noeq
type dsum =
| DSum:
(key: eqtype) ->
(repr: eqtype) ->
(e: enum key repr) ->
(data: Type) ->
(tag_of_data: (data -> Tot (maybe_enum_key e))) ->
(type_of_known_tag: (enum_key e -> Tot Type)) ->
(type_of_unknown_tag: Type) ->
(synth_case: ((x: maybe_enum_key e) -> (y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) -> Tot (refine_with_tag tag_of_data x))) ->
(synth_case_recip: ((k: maybe_enum_key e) -> (refine_with_tag tag_of_data k) -> Tot (dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag k))) ->
(synth_case_recip_synth_case: (
(x: maybe_enum_key e) ->
(y: dsum_type_of_tag' e type_of_known_tag type_of_unknown_tag x) ->
Tot (squash
(synth_case_recip x (synth_case x y) == y)
)
)) ->
(synth_case_synth_case_recip: (
(x: data) ->
Tot (squash
(synth_case (tag_of_data x) (synth_case_recip (tag_of_data x) x) == x)
)
)) ->
dsum
inline_for_extraction
let dsum_key_type (t: dsum) : Tot eqtype =
match t with (DSum key _ _ _ _ _ _ _ _ _ _) -> key
inline_for_extraction
let dsum_repr_type (t: dsum) : Tot eqtype =
match t with (DSum _ repr _ _ _ _ _ _ _ _ _) -> repr
inline_for_extraction
let dsum_enum (t: dsum) : Tot (enum (dsum_key_type t) (dsum_repr_type t)) =
match t with (DSum _ _ e _ _ _ _ _ _ _ _) -> e
inline_for_extraction
let dsum_key (t: dsum) : Tot Type =
maybe_enum_key (dsum_enum t)
inline_for_extraction
let dsum_known_key (t: dsum) : Tot Type =
enum_key (dsum_enum t)
inline_for_extraction
let dsum_unknown_key (t: dsum) : Tot Type =
unknown_enum_repr (dsum_enum t)
inline_for_extraction
let dsum_type (t: dsum) : Tot Type =
//NS: this was rewritten from `let DSum ... data .. = t in data`
//to workaround a glitch in desugaring the above, which introduces
//an additional, unreduced let binding for extraction
match t with
| DSum _ _ _ data _ _ _ _ _ _ _ -> data
inline_for_extraction
let dsum_tag_of_data (t: dsum) : Tot ((x: dsum_type t) -> Tot (dsum_key t)) =
match t with (DSum _ _ _ _ tag_of_data _ _ _ _ _ _) -> tag_of_data
inline_for_extraction
let dsum_cases (t: dsum) (x: dsum_key t) : Type =
refine_with_tag #(dsum_key t) #(dsum_type t) (dsum_tag_of_data t) x
inline_for_extraction
let dsum_type_of_known_tag (t: dsum) : Tot ((k: dsum_known_key t) -> Tot Type) =
match t with (DSum _ _ _ _ _ type_of_known_tag _ _ _ _ _) ->
type_of_known_tag
inline_for_extraction
let dsum_type_of_unknown_tag (t: dsum) : Tot Type =
match t with (DSum _ _ _ _ _ _ type_of_unknown_tag _ _ _ _) ->
type_of_unknown_tag
inline_for_extraction
let dsum_type_of_tag (t: dsum) =
dsum_type_of_tag' (dsum_enum t) (dsum_type_of_known_tag t) (dsum_type_of_unknown_tag t)
let weaken_parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k' : parser_kind)
: Tot parser_kind
= let keys : list (dsum_key_type s) = List.Tot.map fst (dsum_enum s) in
glb_list_of #(dsum_key_type s) (fun (x: dsum_key_type s) ->
if List.Tot.mem x keys
then let (| k, _ |) = f x in k
else k'
) (List.Tot.map fst (dsum_enum s)) `glb` k'
let weaken_parse_dsum_cases_kind'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k' : parser_kind)
(p: parser k' (dsum_type_of_unknown_tag s))
: Tot parser_kind
= weaken_parse_dsum_cases_kind s f k'
inline_for_extraction
let synth_dsum_case
(s: dsum)
: Tot ((x: dsum_key s) -> dsum_type_of_tag s x -> Tot (refine_with_tag (dsum_tag_of_data s) x))
= match s with DSum _ _ _ _ _ _ _ synth_case _ _ _ -> synth_case
inline_for_extraction
let synth_dsum_case_recip
(s: dsum)
: Tot ((x: dsum_key s) -> refine_with_tag (dsum_tag_of_data s) x -> Tot (dsum_type_of_tag s x))
= match s with DSum _ _ _ _ _ _ _ _ synth_case_recip _ _ -> synth_case_recip
let synth_dsum_case_injective
(s: dsum)
(x: dsum_key s)
: Lemma
(synth_injective (synth_dsum_case s x))
= let f
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(requires (synth_dsum_case s x y1 == synth_dsum_case s x y2))
(ensures (y1 == y2))
= let k1 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y1) == y1) =
DSum?.synth_case_recip_synth_case s x y1
in
let k2 : squash (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2) =
DSum?.synth_case_recip_synth_case s x y2
in
// FIXME: WHY WHY WHY is this assert necessary?
assert (synth_dsum_case_recip s x (synth_dsum_case s x y2) == y2);
()
in
let g
(y1: dsum_type_of_tag s x)
(y2: dsum_type_of_tag s x)
: Lemma
(synth_dsum_case s x y1 == synth_dsum_case s x y2 ==> y1 == y2)
= Classical.move_requires (f y1) y2
in
Classical.forall_intro_2 g
let parse_dsum_type_of_tag
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)) (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x')))
| Unknown x' -> weaken (weaken_parse_dsum_cases_kind s f k) g <: parser (weaken_parse_dsum_cases_kind s f k) (dsum_type_of_tag s x)
let parse_dsum_cases
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (weaken_parse_dsum_cases_kind s f k) (dsum_cases s x))
= synth_dsum_case_injective s x;
parse_dsum_type_of_tag s f g x `parse_synth` synth_dsum_case s x
let parse_dsum_cases_kind
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot parser_kind
= match x with
| Known k -> dfst (f k)
| _ -> k
let parse_dsum_type_of_tag'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x))
= match x with
| Known x' -> coerce (parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)) (dsnd (f x'))
| Unknown x' -> g <: parser (parse_dsum_cases_kind s f g x) (dsum_type_of_tag s x)
let parse_dsum_cases'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
: Tot (parser (parse_dsum_cases_kind s f g x) (dsum_cases s x))
= synth_dsum_case_injective s x;
match x with
| Known x' -> (dsnd (f x') `parse_synth` synth_dsum_case s (Known x')) <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
| Unknown x' -> g `parse_synth` synth_dsum_case s (Unknown x') <: parser (parse_dsum_cases_kind s f g x) (dsum_cases s x)
let parse_dsum_cases_eq'
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag s))
(x: dsum_key s)
(input: bytes)
: Lemma
(parse (parse_dsum_cases s f g x) input == parse (parse_dsum_cases' s f g x) input)
= synth_dsum_case_injective s x;
match x with
| Known x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) (dsnd (f x'))) (synth_dsum_case s x) input;
parse_synth_eq (dsnd (f x')) (synth_dsum_case s (Known x')) input
| Unknown x' ->
parse_synth_eq (weaken (weaken_parse_dsum_cases_kind s f k) g) (synth_dsum_case s x) input;
parse_synth_eq g (synth_dsum_case s (Unknown x')) input
let parse_dsum'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(#k: parser_kind)
(pc: ((x: dsum_key t) -> Tot (parser k (dsum_cases t x))))
: Tot (parser (and_then_kind kt k) (dsum_type t))
= parse_tagged_union
#kt
#(dsum_key t)
(parse_maybe_enum_key p (dsum_enum t))
#(dsum_type t)
(dsum_tag_of_data t)
#k
pc
inline_for_extraction
let parse_dsum_kind
(kt: parser_kind)
(s: dsum)
(f: (x: dsum_known_key s) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag s x)))
(k: parser_kind)
: Tot parser_kind
= and_then_kind kt (weaken_parse_dsum_cases_kind s f k)
let parse_dsum
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k: parser_kind)
(g: parser k (dsum_type_of_unknown_tag t))
: Tot (parser (parse_dsum_kind kt t f k) (dsum_type t))
= parse_dsum' t p (parse_dsum_cases t f g)
let parse_dsum_eq''
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq #(kt) #(dsum_key t) (parse_maybe_enum_key p (dsum_enum t)) #(dsum_type t) (dsum_tag_of_data t) (parse_dsum_cases t f g) input;
parse_synth_eq p (maybe_enum_key_of_repr (dsum_enum t)) input
let parse_dsum_eq_
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input == (match parse (parse_maybe_enum_key p (dsum_enum t)) input with
| None -> None
| Some (k, consumed_k) ->
let input_k = Seq.slice input consumed_k (Seq.length input) in
begin match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x)
end
))
= parse_tagged_union_eq_gen (parse_maybe_enum_key p (dsum_enum t)) (dsum_tag_of_data t) (parse_dsum_cases t f g) (parse_maybe_enum_key p (dsum_enum t)) (fun input -> ()) (parse_dsum_cases_kind t f g) (parse_dsum_cases' t f g) (fun tg input -> parse_dsum_cases_eq' t f g tg input) input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Enum.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Sum.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Enum",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.Spec.Sum.dsum ->
p: LowParse.Spec.Base.parser kt (LowParse.Spec.Sum.dsum_repr_type t) ->
f:
(x: LowParse.Spec.Sum.dsum_known_key t
-> Prims.dtuple2 LowParse.Spec.Base.parser_kind
(fun k -> LowParse.Spec.Base.parser k (LowParse.Spec.Sum.dsum_type_of_known_tag t x))) ->
g: LowParse.Spec.Base.parser k' (LowParse.Spec.Sum.dsum_type_of_unknown_tag t) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_dsum t p f g) input ==
(match LowParse.Spec.Base.parse p input with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ k' consumed_k) ->
let k = LowParse.Spec.Enum.maybe_enum_key_of_repr (LowParse.Spec.Sum.dsum_enum t) k' in
let input_k = FStar.Seq.Base.slice input consumed_k (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (LowParse.Spec.Sum.parse_dsum_cases' t f g k) input_k with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some (x, consumed_k + consumed_x))
<:
FStar.Pervasives.Native.option (LowParse.Spec.Sum.dsum_type t *
LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Sum.dsum",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Sum.dsum_repr_type",
"LowParse.Spec.Sum.dsum_known_key",
"Prims.dtuple2",
"LowParse.Spec.Sum.dsum_type_of_known_tag",
"LowParse.Spec.Sum.dsum_type_of_unknown_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Enum.parse_maybe_enum_key_eq",
"LowParse.Spec.Sum.dsum_key_type",
"LowParse.Spec.Sum.dsum_enum",
"Prims.unit",
"LowParse.Spec.Sum.parse_dsum_eq_",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Sum.dsum_type",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Sum.parse_dsum",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Sum.dsum_cases",
"LowParse.Spec.Sum.parse_dsum_cases'",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Enum.maybe_enum_key",
"LowParse.Spec.Enum.maybe_enum_key_of_repr",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let parse_dsum_eq'
(#kt: parser_kind)
(t: dsum)
(p: parser kt (dsum_repr_type t))
(f: (x: dsum_known_key t -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x))))
(#k': parser_kind)
(g: parser k' (dsum_type_of_unknown_tag t))
(input: bytes)
: Lemma
(parse (parse_dsum t p f g) input ==
(match parse p input with
| None -> None
| Some (k', consumed_k) ->
let k = maybe_enum_key_of_repr (dsum_enum t) k' in
let input_k = Seq.slice input consumed_k (Seq.length input) in
match parse (parse_dsum_cases' t f g k) input_k with
| None -> None
| Some (x, consumed_x) -> Some ((x <: dsum_type t), consumed_k + consumed_x))) =
| parse_dsum_eq_ t p f g input;
parse_maybe_enum_key_eq p (dsum_enum t) input | false |
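A direct consequence of parse_dsum_eq' that callers often need: if the tag parser p fails on the input, the whole tagged-union parser fails. The corollary below is a sketch (the name parse_dsum_fail_if_tag_fails is ours, not part of LowParse.Spec.Sum); it reuses the binders of the lemma above and may need a small amount of ifuel to reduce the match on the option:

let parse_dsum_fail_if_tag_fails
  (#kt: parser_kind)
  (t: dsum)
  (p: parser kt (dsum_repr_type t))
  (f: (x: dsum_known_key t) -> Tot (k: parser_kind & parser k (dsum_type_of_known_tag t x)))
  (#k': parser_kind)
  (g: parser k' (dsum_type_of_unknown_tag t))
  (input: bytes)
: Lemma
  (requires (parse p input == None))
  (ensures (parse (parse_dsum t p f g) input == None))
= (* the equation given by parse_dsum_eq' collapses to its None branch *)
  parse_dsum_eq' t p f g input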
CSL.Semantics.fst | CSL.Semantics.stronger_post | val stronger_post : post: CSL.Semantics.post_t st a -> next_post: CSL.Semantics.post_t st a -> Prims.logical | let stronger_post (#st:st) (#a:Type u#a) (post next_post:post_t st a) =
forall (x:a) (h:st.mem) (frame:st.hprop).
st.interp (next_post x `st.star` frame) h ==>
st.interp (post x `st.star` frame) h | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 40,
"end_line": 169,
"start_col": 0,
"start_line": 166
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st
let st = s:st0 { st_laws s }
(**** End state defn ****)
(**** Begin expects, provides defns ****)
/// expects (the heap assertion expected by a computation) is simply an st.hprop
///
/// provides, or the post heap assertion, is a st.hprop on [a]-typed result
type post_t (st:st) (a:Type) = a -> st.hprop
(**** End expects, provides defns ****)
effect Mst (a:Type) (#st:st) (req:st.mem -> Type0) (ens:st.mem -> a -> st.mem -> Type0) =
NMSTATE a st.mem st.evolves req ens
(**** Begin interface of actions ****)
/// Actions are essentially state transformers that preserve frames
let preserves_frame (#st:st) (pre post:st.hprop) (m0 m1:st.mem) =
forall (frame:st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1
let action_t
(#st:st)
(#a:Type)
(pre:st.hprop)
(post:post_t st a)
= unit ->
Mst a
(requires fun m0 -> st.interp (pre `st.star` st.invariant m0) m0)
(ensures fun m0 x m1 ->
st.interp ((post x) `st.star` st.invariant m1) m1 /\
preserves_frame pre (post x) m0 m1)
(**** End interface of actions ****)
(**** Begin definition of the computation AST ****)
let weaker_pre (#st:st) (pre:st.hprop) (next_pre:st.hprop) =
forall (h:st.mem) (frame:st.hprop).
st.interp (pre `st.star` frame) h ==>
st.interp (next_pre `st.star` frame) h | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | post: CSL.Semantics.post_t st a -> next_post: CSL.Semantics.post_t st a -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st",
"CSL.Semantics.post_t",
"Prims.l_Forall",
"CSL.Semantics.__proj__Mkst0__item__mem",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"Prims.l_imp",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.__proj__Mkst0__item__star",
"Prims.logical"
] | [] | false | false | false | false | true | let stronger_post (#st: st) (#a: Type u#a) (post next_post: post_t st a) =
| forall (x: a) (h: st.mem) (frame: st.hprop).
st.interp ((next_post x) `st.star` frame) h ==> st.interp ((post x) `st.star` frame) h | false |
|
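stronger_post orders postconditions by entailment under any frame; like weaker_pre it is a preorder. A minimal sketch of the reflexive case (the lemma name is illustrative, not part of CSL.Semantics, and assumes the module's default SMT options):

(* Trivial: the universally quantified implication is instantiated with itself. *)
let stronger_post_refl (#st:st) (#a:Type u#a) (post:post_t st a)
  : Lemma (stronger_post post post)
  = ()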
CSL.Semantics.fst | CSL.Semantics.weakening_ok | val weakening_ok : pre: Mkst0?.hprop st ->
post: CSL.Semantics.post_t st a ->
wpre: Mkst0?.hprop st ->
wpost: CSL.Semantics.post_t st a
-> Prims.logical | let weakening_ok (#st:st) (#a:Type u#a) (pre:st.hprop) (post:post_t st a)
(wpre:st.hprop) (wpost:post_t st a)
= weaker_pre wpre pre /\ stronger_post wpost post | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 49,
"end_line": 173,
"start_col": 0,
"start_line": 171
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st
let st = s:st0 { st_laws s }
(**** End state defn ****)
(**** Begin expects, provides defns ****)
/// expects (the heap assertion expected by a computation) is simply an st.hprop
///
/// provides, or the post heap assertion, is a st.hprop on [a]-typed result
type post_t (st:st) (a:Type) = a -> st.hprop
(**** End expects, provides defns ****)
effect Mst (a:Type) (#st:st) (req:st.mem -> Type0) (ens:st.mem -> a -> st.mem -> Type0) =
NMSTATE a st.mem st.evolves req ens
(**** Begin interface of actions ****)
/// Actions are essentially state transformers that preserve frames
let preserves_frame (#st:st) (pre post:st.hprop) (m0 m1:st.mem) =
forall (frame:st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1
let action_t
(#st:st)
(#a:Type)
(pre:st.hprop)
(post:post_t st a)
= unit ->
Mst a
(requires fun m0 -> st.interp (pre `st.star` st.invariant m0) m0)
(ensures fun m0 x m1 ->
st.interp ((post x) `st.star` st.invariant m1) m1 /\
preserves_frame pre (post x) m0 m1)
(**** End interface of actions ****)
(**** Begin definition of the computation AST ****)
let weaker_pre (#st:st) (pre:st.hprop) (next_pre:st.hprop) =
forall (h:st.mem) (frame:st.hprop).
st.interp (pre `st.star` frame) h ==>
st.interp (next_pre `st.star` frame) h
let stronger_post (#st:st) (#a:Type u#a) (post next_post:post_t st a) =
forall (x:a) (h:st.mem) (frame:st.hprop).
st.interp (next_post x `st.star` frame) h ==>
st.interp (post x `st.star` frame) h | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
pre: Mkst0?.hprop st ->
post: CSL.Semantics.post_t st a ->
wpre: Mkst0?.hprop st ->
wpost: CSL.Semantics.post_t st a
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"CSL.Semantics.post_t",
"Prims.l_and",
"CSL.Semantics.weaker_pre",
"CSL.Semantics.stronger_post",
"Prims.logical"
] | [] | false | false | false | false | true | let weakening_ok
(#st: st)
(#a: Type u#a)
(pre: st.hprop)
(post: post_t st a)
(wpre: st.hprop)
(wpost: post_t st a)
=
| weaker_pre wpre pre /\ stronger_post wpost post | false |
|
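weakening_ok packages the two side conditions consumed by the Weaken node of the computation AST: the new precondition must be weaker and the new postcondition stronger. A small sanity check (hypothetical name, assuming the module's default SMT options) is that weakening by the same pre and post is always permitted:

let weakening_ok_refl (#st:st) (#a:Type u#a) (pre:st.hprop) (post:post_t st a)
  : Lemma (weakening_ok pre post pre post)
  = ()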
CSL.Semantics.fst | CSL.Semantics.step_ens | val step_ens:
#st: st ->
#a: Type u#a ->
#pre: st.hprop ->
#post: post_t st a ->
f: m st a pre post ->
st.mem ->
step_result st a ->
st.mem
-> Type0 | val step_ens:
#st: st ->
#a: Type u#a ->
#pre: st.hprop ->
#post: post_t st a ->
f: m st a pre post ->
st.mem ->
step_result st a ->
st.mem
-> Type0 | let step_ens (#st:st) (#a:Type u#a) (#pre:st.hprop) (#post:post_t st a)
(f:m st a pre post)
: st.mem -> step_result st a -> st.mem -> Type0
= fun m0 r m1 ->
let Step #_ #_ #next_pre #next_post _ = r in
st.interp (next_pre `st.star` st.invariant m1) m1 /\
stronger_post post next_post /\
preserves_frame pre next_pre m0 m1 | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 36,
"end_line": 271,
"start_col": 0,
"start_line": 264
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
/// quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a)=
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st
let st = s:st0 { st_laws s }
(**** End state defn ****)
(**** Begin expects, provides defns ****)
/// expects (the heap assertion expected by a computation) is simply an st.hprop
///
/// provides, or the post heap assertion, is a st.hprop on [a]-typed result
type post_t (st:st) (a:Type) = a -> st.hprop
(**** End expects, provides defns ****)
effect Mst (a:Type) (#st:st) (req:st.mem -> Type0) (ens:st.mem -> a -> st.mem -> Type0) =
NMSTATE a st.mem st.evolves req ens
(**** Begin interface of actions ****)
/// Actions are essentially state transformers that preserve frames
let preserves_frame (#st:st) (pre post:st.hprop) (m0 m1:st.mem) =
forall (frame:st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1
let action_t
(#st:st)
(#a:Type)
(pre:st.hprop)
(post:post_t st a)
= unit ->
Mst a
(requires fun m0 -> st.interp (pre `st.star` st.invariant m0) m0)
(ensures fun m0 x m1 ->
st.interp ((post x) `st.star` st.invariant m1) m1 /\
preserves_frame pre (post x) m0 m1)
(**** End interface of actions ****)
(**** Begin definition of the computation AST ****)
let weaker_pre (#st:st) (pre:st.hprop) (next_pre:st.hprop) =
forall (h:st.mem) (frame:st.hprop).
st.interp (pre `st.star` frame) h ==>
st.interp (next_pre `st.star` frame) h
let stronger_post (#st:st) (#a:Type u#a) (post next_post:post_t st a) =
forall (x:a) (h:st.mem) (frame:st.hprop).
st.interp (next_post x `st.star` frame) h ==>
st.interp (post x `st.star` frame) h
let weakening_ok (#st:st) (#a:Type u#a) (pre:st.hprop) (post:post_t st a)
(wpre:st.hprop) (wpost:post_t st a)
= weaker_pre wpre pre /\ stronger_post wpost post
noeq
type m (st:st) :
a:Type u#a ->
pre:st.hprop ->
post:post_t st a -> Type
=
| Ret:
#a:Type u#a ->
post:post_t st a ->
x:a ->
m st a (post x) post
| Bind:
#a:Type u#a ->
#pre:st.hprop ->
#post_a:post_t st a ->
#b:Type u#a ->
#post_b:post_t st b ->
f:m st a pre post_a ->
g:(x:a -> Dv (m st b (post_a x) post_b)) ->
m st b pre post_b
| Act:
#a:Type u#a ->
#pre:st.hprop ->
#post:post_t st a ->
f:action_t #st #a pre post ->
m st a pre post
| Frame:
#a:Type ->
#pre:st.hprop ->
#post:post_t st a ->
f:m st a pre post ->
frame:st.hprop ->
m st a (pre `st.star` frame) (fun x -> post x `st.star` frame)
| Par:
#aL:Type u#a ->
#preL:st.hprop ->
#postL:post_t st aL ->
mL:m st aL preL postL ->
#aR:Type u#a ->
#preR:st.hprop ->
#postR:post_t st aR ->
mR:m st aR preR postR ->
m st (aL & aR) (preL `st.star` preR) (fun (xL, xR) -> postL xL `st.star` postR xR)
| Weaken:
#a:Type u#a ->
#pre:st.hprop ->
#post:post_t st a ->
wpre:st.hprop ->
wpost:post_t st a ->
_:squash (weakening_ok pre post wpre wpost) ->
m st a pre post ->
m st a wpre wpost
(**** End definition of the computation AST ****)
(**** Stepping relation ****)
/// All steps preserve frames
noeq
type step_result (st:st) (a:Type u#a) =
| Step:
#next_pre:st.hprop ->
#next_post:post_t st a ->
m st a next_pre next_post ->
step_result st a
(**** Type of the single-step interpreter ****)
/// Interpreter is setup as an NMST function from computation trees to computation trees
///
/// As the computation evolves, the post becomes stronger
unfold
let step_req (#st:st) (#a:Type u#a) (#pre:st.hprop) (#post:post_t st a) (f:m st a pre post)
: st.mem -> Type0
= fun m0 -> st.interp (pre `st.star` st.invariant m0) m0 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
f: CSL.Semantics.m st a pre post ->
_: Mkst0?.mem st ->
_: CSL.Semantics.step_result st a ->
_: Mkst0?.mem st
-> Type0 | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"CSL.Semantics.post_t",
"CSL.Semantics.m",
"CSL.Semantics.__proj__Mkst0__item__mem",
"CSL.Semantics.step_result",
"Prims.l_and",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.__proj__Mkst0__item__star",
"CSL.Semantics.__proj__Mkst0__item__invariant",
"CSL.Semantics.stronger_post",
"CSL.Semantics.preserves_frame"
] | [] | false | false | false | false | true | let step_ens (#st: st) (#a: Type u#a) (#pre: st.hprop) (#post: post_t st a) (f: m st a pre post)
: st.mem -> step_result st a -> st.mem -> Type0 =
| fun m0 r m1 ->
let Step #_ #_ #next_pre #next_post _ = r in
st.interp (next_pre `st.star` (st.invariant m1)) m1 /\ stronger_post post next_post /\
preserves_frame pre next_pre m0 m1 | false |
CSL.Semantics.fst | CSL.Semantics.preserves_frame | val preserves_frame : pre: Mkst0?.hprop st -> post: Mkst0?.hprop st -> m0: Mkst0?.mem st -> m1: Mkst0?.mem st
-> Prims.logical | let preserves_frame (#st:st) (pre post:st.hprop) (m0 m1:st.mem) =
forall (frame:st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1 | {
"file_name": "share/steel/examples/steel/CSL.Semantics.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 69,
"end_line": 142,
"start_col": 0,
"start_line": 139
} | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module CSL.Semantics
module P = FStar.Preorder
open FStar.Tactics
open FStar.NMST
(*
* This module provides a semantic model for a combined effect of
* divergence, state, and parallel composition of atomic actions.
*
* It is built over a monotonic state effect -- so that we can give
* lock semantics using monotonicity
*
* Using the semantics, we derive a CSL in a partial correctness setting.
*)
#push-options "--fuel 0 --ifuel 2 --z3rlimit 20 --print_implicits --print_universes \
--using_facts_from 'Prims FStar.Pervasives FStar.Preorder MST NMST CSL.Semantics'"
(**** Begin state defn ****)
/// We start by defining some basic notions for a commutative monoid.
///
/// We could reuse FStar.Algebra.CommMonoid, but this style with
quantifiers was more convenient for the proof done here.
let symmetry #a (equals: a -> a -> prop) =
forall x y. {:pattern (x `equals` y)}
x `equals` y ==> y `equals` x
let transitive #a (equals:a -> a -> prop) =
forall x y z. x `equals` y /\ y `equals` z ==> x `equals` z
let associative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y z.
f x (f y z) `equals` f (f x y) z
let commutative #a (equals: a -> a -> prop) (f: a -> a -> a) =
forall x y.{:pattern f x y}
f x y `equals` f y x
let is_unit #a (x:a) (equals: a -> a -> prop) (f:a -> a -> a) =
forall y. {:pattern f x y \/ f y x}
f x y `equals` y /\
f y x `equals` y
let equals_ext #a (equals:a -> a -> prop) (f:a -> a -> a) =
forall x1 x2 y. x1 `equals` x2 ==> f x1 y `equals` f x2 y
noeq
type st0 = {
mem:Type u#2;
evolves:P.preorder mem;
hprop:Type u#2;
invariant: mem -> hprop;
interp: hprop -> mem -> prop;
emp:hprop;
star: hprop -> hprop -> hprop;
equals: hprop -> hprop -> prop;
}
////////////////////////////////////////////////////////////////////////////////
let interp_extensionality #r #s (equals:r -> r -> prop) (f:r -> s -> prop) =
forall x y h. {:pattern equals x y; f x h} equals x y /\ f x h ==> f y h
let affine (st:st0) =
forall r0 r1 s. {:pattern (st.interp (r0 `st.star` r1) s) }
st.interp (r0 `st.star` r1) s ==> st.interp r0 s
////////////////////////////////////////////////////////////////////////////////
let st_laws (st:st0) =
(* standard laws about the equality relation *)
symmetry st.equals /\
transitive st.equals /\
interp_extensionality st.equals st.interp /\
(* standard laws for star forming a CM *)
associative st.equals st.star /\
commutative st.equals st.star /\
is_unit st.emp st.equals st.star /\
equals_ext st.equals st.star /\
(* We're working in an affine interpretation of SL *)
affine st
let st = s:st0 { st_laws s }
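(* Editor's sketch, not part of the original file: a direct consequence of
   the affinity clause of [st_laws], which is in scope for any [s:st]: a
   starred-off resource can be dropped under [interp]. The name is
   hypothetical, and the empty proof is expected to follow from a single
   instantiation of [affine]. *)
let interp_drop_right (s:st) (p q:s.hprop) (m:s.mem)
  : Lemma (requires s.interp (p `s.star` q) m)
          (ensures s.interp p m)
  = ()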
(**** End state defn ****)
(**** Begin expects, provides defns ****)
/// expects (the heap assertion expected by a computation) is simply an st.hprop
///
/// provides, or the post heap assertion, is an st.hprop on the [a]-typed result
type post_t (st:st) (a:Type) = a -> st.hprop
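(* Editor's sketch (hypothetical name, not in the original file): the
   simplest provides-assertion, one that ignores the returned value. *)
let const_post (#st:st) (#a:Type) (p:st.hprop) : post_t st a = fun _ -> p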
(**** End expects, provides defns ****)
effect Mst (a:Type) (#st:st) (req:st.mem -> Type0) (ens:st.mem -> a -> st.mem -> Type0) =
NMSTATE a st.mem st.evolves req ens
(**** Begin interface of actions ****)
/// Actions are essentially state transformers that preserve frames | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.NMST.fst.checked",
"FStar.MST.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "CSL.Semantics.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.NMST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "CSL",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | pre: Mkst0?.hprop st -> post: Mkst0?.hprop st -> m0: Mkst0?.mem st -> m1: Mkst0?.mem st
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"CSL.Semantics.st",
"CSL.Semantics.__proj__Mkst0__item__hprop",
"CSL.Semantics.__proj__Mkst0__item__mem",
"Prims.l_Forall",
"Prims.l_imp",
"CSL.Semantics.__proj__Mkst0__item__interp",
"CSL.Semantics.__proj__Mkst0__item__star",
"CSL.Semantics.__proj__Mkst0__item__invariant",
"Prims.logical"
] | [] | false | false | false | false | true | let preserves_frame (#st: st) (pre post: st.hprop) (m0 m1: st.mem) =
| forall (frame: st.hprop).
st.interp ((pre `st.star` frame) `st.star` (st.invariant m0)) m0 ==>
st.interp ((post `st.star` frame) `st.star` (st.invariant m1)) m1 | false |