Dataset columns (types and length/class statistics from the dataset viewer):

- file_name — string (length 5–52)
- name — string (length 4–95)
- original_source_type — string (length 0–23k)
- source_type — string (length 9–23k)
- source_definition — string (length 9–57.9k)
- source — dict
- source_range — dict
- file_context — string (length 0–721k)
- dependencies — dict
- opens_and_abbrevs — list (length 2–94)
- vconfig — dict
- interleaved — bool (1 class)
- verbose_type — string (length 1–7.42k)
- effect — string (118 classes)
- effect_flags — sequence (length 0–2)
- mutual_with — sequence (length 0–11)
- ideal_premises — sequence (length 0–236)
- proof_features — sequence (length 0–1)
- is_simple_lemma — bool (2 classes)
- is_div — bool (2 classes)
- is_proof — bool (2 classes)
- is_simply_typed — bool (2 classes)
- is_type — bool (2 classes)
- partial_definition — string (length 5–3.99k)
- completed_definiton — string (length 1–1.63M)
- isa_cross_project_example — bool (1 class)

Each row below lists these fields in this order, separated by `|`.
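A minimal sketch of working with rows in this schema, assuming they have been exported as a local JSON-lines file (the path `fstar_definitions.jsonl` is a placeholder, not something named in this preview). It uses the Hugging Face `datasets` library to load the rows and pick out short proof entries via the `is_proof`, `source_definition`, and `source_type` columns listed above.

```python
# Sketch only: load rows matching the schema above from a placeholder
# JSON-lines export and inspect a few short proof definitions.
from datasets import load_dataset

# "fstar_definitions.jsonl" is a hypothetical local export of the rows shown below.
ds = load_dataset("json", data_files="fstar_definitions.jsonl", split="train")

# Keep only proof entries whose definition body is reasonably short.
proofs = ds.filter(
    lambda row: row["is_proof"] and len(row["source_definition"]) < 2000
)

# Print a small sample: file, name, effect, and truncated type/definition.
for row in proofs.select(range(min(3, len(proofs)))):
    print(row["file_name"], row["name"])
    print("  effect:", row["effect"])
    print("  type:  ", row["source_type"][:120])
    print("  partial definition:", row["partial_definition"][:120])
```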
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.typing_extensional | val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h) | val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h) | let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 20,
"end_line": 86,
"start_col": 0,
"start_line": 81
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: StlcStrongDbParSubst.typing g e t ->
g': StlcStrongDbParSubst.env{FStar.FunctionalExtensionality.feq g g'}
-> Prims.Tot (StlcStrongDbParSubst.typing g' e t) | Prims.Tot | [
"total",
""
] | [] | [
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"FStar.FunctionalExtensionality.feq",
"StlcStrongDbParSubst.var",
"FStar.Pervasives.Native.option",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"StlcStrongDbParSubst.TyVar",
"StlcStrongDbParSubst.extend",
"StlcStrongDbParSubst.TyLam",
"StlcCbvDbParSubst.typing_extensional",
"StlcStrongDbParSubst.TArr",
"StlcStrongDbParSubst.TyApp",
"StlcStrongDbParSubst.TyUnit"
] | [
"recursion"
] | false | false | false | false | false | let rec typing_extensional #e #g #t h g' =
| match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.appears_free_in | val appears_free_in : x:var -> e:exp -> Tot bool (decreases e) | val appears_free_in : x:var -> e:exp -> Tot bool (decreases e) | let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 18,
"end_line": 94,
"start_col": 0,
"start_line": 89
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: StlcStrongDbParSubst.var -> e: StlcStrongDbParSubst.exp -> Prims.Tot Prims.bool | Prims.Tot | [
"total",
""
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"Prims.op_Equality",
"Prims.op_BarBar",
"StlcCbvDbParSubst.appears_free_in",
"StlcStrongDbParSubst.typ",
"Prims.op_Addition",
"Prims.bool"
] | [
"recursion"
] | false | false | false | true | false | let rec appears_free_in x e =
| match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x + 1) e1
| EUnit -> false | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.free_in_context | val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h) | val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h) | let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> () | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 16,
"end_line": 120,
"start_col": 0,
"start_line": 115
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
for in a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: StlcStrongDbParSubst.var -> h: StlcStrongDbParSubst.typing g e t
-> FStar.Pervasives.Lemma (ensures StlcCbvDbParSubst.appears_free_in x e ==> Some? (g x))
(decreases h) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"StlcStrongDbParSubst.extend",
"StlcCbvDbParSubst.free_in_context",
"Prims.op_Addition",
"StlcStrongDbParSubst.TArr",
"Prims.unit"
] | [
"recursion"
] | false | false | true | false | false | let rec free_in_context x #e #g #t h =
| match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x + 1) h1
| TyApp h1 h2 ->
free_in_context x h1;
free_in_context x h2
| TyUnit -> () | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.typable_empty_not_free | val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e))) | val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e))) | let typable_empty_not_free x #e #t h = free_in_context x h | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 58,
"end_line": 125,
"start_col": 0,
"start_line": 125
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
for in a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e))) | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: StlcStrongDbParSubst.var -> h: StlcStrongDbParSubst.typing StlcStrongDbParSubst.empty e t
-> FStar.Pervasives.Lemma (ensures Prims.op_Negation (StlcCbvDbParSubst.appears_free_in x e)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"StlcStrongDbParSubst.empty",
"StlcCbvDbParSubst.free_in_context",
"Prims.unit"
] | [] | true | false | true | false | false | let typable_empty_not_free x #e #t h =
| free_in_context x h | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.below | val below : x:var -> e:exp -> Tot bool (decreases e) | val below : x:var -> e:exp -> Tot bool (decreases e) | let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 17,
"end_line": 133,
"start_col": 0,
"start_line": 128
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
for in a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: StlcStrongDbParSubst.var -> e: StlcStrongDbParSubst.exp -> Prims.Tot Prims.bool | Prims.Tot | [
"total",
""
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"Prims.op_LessThan",
"Prims.op_AmpAmp",
"StlcCbvDbParSubst.below",
"StlcStrongDbParSubst.typ",
"Prims.op_Addition",
"Prims.bool"
] | [
"recursion"
] | false | false | false | true | false | let rec below x e =
| match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x + 1) e1
| EUnit -> true | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.context_invariance | val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h) | val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h) | let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 20,
"end_line": 111,
"start_col": 0,
"start_line": 104
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
for in a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: StlcStrongDbParSubst.typing g e t ->
g': StlcStrongDbParSubst.env{StlcCbvDbParSubst.envEqualE e g g'}
-> Prims.Tot (StlcStrongDbParSubst.typing g' e t) | Prims.Tot | [
"total",
""
] | [] | [
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"StlcCbvDbParSubst.envEqualE",
"StlcStrongDbParSubst.var",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"StlcStrongDbParSubst.TyVar",
"StlcStrongDbParSubst.extend",
"StlcStrongDbParSubst.TyLam",
"StlcCbvDbParSubst.context_invariance",
"StlcStrongDbParSubst.TArr",
"StlcStrongDbParSubst.TyApp",
"StlcStrongDbParSubst.TyUnit"
] | [
"recursion"
] | false | false | false | false | false | let rec context_invariance #e #g #t h g' =
| match h with
| TyVar x -> TyVar x
| TyLam t_y h1 -> TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 -> TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.sub_beta_gen | val sub_beta_gen : var -> exp -> Tot sub | val sub_beta_gen : var -> exp -> Tot sub | let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 49,
"end_line": 173,
"start_col": 0,
"start_line": 171
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
for in a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: StlcStrongDbParSubst.var -> v: StlcStrongDbParSubst.exp -> StlcStrongDbParSubst.sub | Prims.Tot | [
"total"
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"Prims.op_LessThan",
"StlcStrongDbParSubst.EVar",
"Prims.bool",
"Prims.op_Equality",
"Prims.op_Subtraction"
] | [] | false | false | false | true | false | let sub_beta_gen x v =
| fun y -> if y < x then (EVar y) else if y = x then v else (EVar (y - 1)) | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.typable_empty_closed | val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e)) | val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e)) | let typable_empty_closed #e #t h = typable_below 0 h | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 52,
"end_line": 168,
"start_col": 0,
"start_line": 168
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
for in a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: StlcStrongDbParSubst.typing StlcStrongDbParSubst.empty e t
-> FStar.Pervasives.Lemma (ensures StlcCbvDbParSubst.closed e) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"StlcStrongDbParSubst.empty",
"StlcCbvDbParSubst.typable_below",
"Prims.unit"
] | [] | true | false | true | false | false | let typable_empty_closed #e #t h =
| typable_below 0 h | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.extend_gen_typing_conversion | val extend_gen_typing_conversion
(#t: typ)
(#g: env)
(#e0: exp)
(#t0: typ)
(h: typing (extend t g) e0 t0)
: Tot (typing (extend_gen 0 t g) e0 t0) | val extend_gen_typing_conversion
(#t: typ)
(#g: env)
(#e0: exp)
(#t0: typ)
(h: typing (extend t g) e0 t0)
: Tot (typing (extend_gen 0 t g) e0 t0) | let rec extend_gen_typing_conversion (#t:typ) (#g:env) (#e0:exp) (#t0:typ) (h:typing (extend t g) e0 t0)
:Tot (typing (extend_gen 0 t g) e0 t0) = h | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 44,
"end_line": 278,
"start_col": 0,
"start_line": 277
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_closed v s = subst_below 0 v s
val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y))
let subst_gen_elam_aux x v y =
if y = 0 then ()
else
(assert((sub_elam (sub_beta_gen x v)) y =
(subst sub_inc (sub_beta_gen x v (y-1))));
if y-1 < x then ()
else if y-1 = x then
(assert(sub_beta_gen x v (y-1) = v);
assert(sub_beta_gen (x+1) v y = v);
subst_closed v sub_inc)
else ())
val subst_gen_elam_aux_forall : x:var -> v:exp{closed v} -> Lemma
(ensures (feq (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v)))
let subst_gen_elam_aux_forall x v = admit()
(* should follow from subst_gen_elam_aux and forall_intro *)
val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') =
ELam t_y (subst (sub_beta_gen (x+1) v) e')))
let subst_gen_elam x v t_y e' =
subst_gen_elam_aux_forall x v;
subst_extensional (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v) e';
assert(subst (sub_beta_gen x v) (ELam t_y e')
= ELam t_y (subst (sub_elam (sub_beta_gen x v)) e'))
val substitution_preserves_typing :
x:var -> #e:exp -> #v:exp -> #t_x:typ -> #t:typ -> #g:env ->
$h1:typing empty v t_x ->
$h2:typing (extend_gen x t_x g) e t ->
Tot (typing g (subst (sub_beta_gen x v) e) t) (decreases e)
let rec substitution_preserves_typing x #e #v #t_x #t #g h1 h2 =
match h2 with
| TyVar y ->
if x=y then (typable_empty_closed h1;
closed_appears_free v;
context_invariance h1 g)
else if y<x then context_invariance h2 g
else TyVar (y-1)
| TyLam #_ t_y #e' #t' h21 ->
let h21' = typing_extensional h21 (extend_gen (x+1) t_x (extend t_y g)) in
typable_empty_closed h1;
subst_gen_elam x v t_y e';
let h21' : (r:typing (extend_gen (x+1) t_x (extend t_y g)) e' t'{e' << e}) =
h21' in
TyLam t_y (substitution_preserves_typing (x+1) h1 h21')
| TyApp #_ #e1 #e2 #t11 #t12 h21 h22 ->
let h21 : (r:typing (extend_gen x t_x g) e1 (TArr t11 t12){e1 << e}) = h21 in
let h22 : (r:typing (extend_gen x t_x g) e2 t11{e2 << e}) = h22 in
(TyApp (substitution_preserves_typing x h1 h21)
(substitution_preserves_typing x h1 h22))
| TyUnit -> TyUnit
val extend_gen_0_aux : t:typ -> g:env -> y:var ->
Lemma (extend_gen 0 t g y = extend t g y)
let extend_gen_0_aux t g y = ()
val extend_gen_0 : t:typ -> g:env ->
Lemma (feq (extend_gen 0 t g) (extend t g))
let extend_gen_0 t g =
forall_intro (extend_gen_0_aux t g) | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: StlcStrongDbParSubst.typing (StlcStrongDbParSubst.extend t g) e0 t0
-> StlcStrongDbParSubst.typing (StlcCbvDbParSubst.extend_gen 0 t g) e0 t0 | Prims.Tot | [
"total"
] | [] | [
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.typing",
"StlcStrongDbParSubst.extend",
"StlcCbvDbParSubst.extend_gen"
] | [
"recursion"
] | false | false | false | false | false | let rec extend_gen_typing_conversion
(#t: typ)
(#g: env)
(#e0: exp)
(#t0: typ)
(h: typing (extend t g) e0 t0)
: Tot (typing (extend_gen 0 t g) e0 t0) =
| h | false |
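Since `extend_gen 0 t g` reduces to `extend t g`, the coercion recorded above is definitionally the identity. A small usage sketch, assuming the `typing` constructors already used in this file (`TyLam`, `TyUnit`) and the base type `TUnit` from StlcStrongDbParSubst:

(* A closed derivation: the constant-unit function typed in the empty context. *)
let const_unit_deriv : typing empty (ELam TUnit EUnit) (TArr TUnit TUnit) =
  TyLam TUnit TyUnit

(* Re-reading a derivation under `extend` as one under `extend_gen 0`. *)
let under_extend_gen_0 (h:typing (extend TUnit empty) EUnit TUnit)
  : typing (extend_gen 0 TUnit empty) EUnit TUnit
  = extend_gen_typing_conversion h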
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.typable_below | val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h) | val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h) | let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> () | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 16,
"end_line": 164,
"start_col": 0,
"start_line": 159
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x: StlcStrongDbParSubst.var ->
h: StlcStrongDbParSubst.typing g e t {StlcCbvDbParSubst.below_env x g}
-> FStar.Pervasives.Lemma (ensures StlcCbvDbParSubst.below x e) (decreases h) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"StlcCbvDbParSubst.below_env",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"StlcStrongDbParSubst.TArr",
"StlcCbvDbParSubst.typable_below",
"Prims.unit",
"StlcStrongDbParSubst.extend",
"Prims.op_Addition"
] | [
"recursion"
] | false | false | true | false | false | let rec typable_below x #g #e #t h =
| match h with
| TyVar y -> ()
| TyApp h1 h2 ->
typable_below x h1;
typable_below x h2
| TyLam _y h1 -> typable_below (x + 1) h1
| TyUnit -> () | false |
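A usage sketch for `typable_below`: assuming `empty` maps every variable to `None` (so `below_env x empty` holds for any `x`), instantiating the lemma at index 0 recovers exactly the `typable_empty_closed` proof recorded earlier.

(* Specialization to the empty environment; mirrors typable_empty_closed. *)
let below_zero_in_empty (#e:exp) (#t:typ) (h:typing empty e t)
  : Lemma (below 0 e)
  = typable_below 0 h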
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.subst_below | val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v) | val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v) | let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> () | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 15,
"end_line": 204,
"start_col": 0,
"start_line": 196
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x: StlcStrongDbParSubst.var ->
v: StlcStrongDbParSubst.exp{StlcCbvDbParSubst.below x v} ->
s: StlcStrongDbParSubst.sub{StlcCbvDbParSubst.sub_below x s}
-> FStar.Pervasives.Lemma (ensures v = StlcStrongDbParSubst.subst s v) (decreases v) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"Prims.b2t",
"StlcCbvDbParSubst.below",
"StlcStrongDbParSubst.sub",
"StlcCbvDbParSubst.sub_below",
"StlcCbvDbParSubst.subst_below",
"Prims.unit",
"StlcStrongDbParSubst.typ",
"Prims._assert",
"Prims.op_Equality",
"StlcStrongDbParSubst.subst",
"StlcStrongDbParSubst.ELam",
"StlcStrongDbParSubst.sub_elam",
"Prims.op_Addition"
] | [
"recursion"
] | false | false | true | false | false | let rec subst_below x v s =
| match v with
| EVar y -> ()
| EApp e1 e2 ->
subst_below x e1 s;
subst_below x e2 s
| ELam t e ->
(subst_below (x + 1) e (sub_elam s);
assert (e = subst (sub_elam s) e);
assert (v = ELam t e);
assert (subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> () | false |
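To make the index arithmetic of `sub_beta_gen` concrete, here is a small sketch checked by normalization alone (assuming only the definitions above): substituting at position 1 leaves index 0 untouched, places the payload at index 1, and shifts index 2 down by one.

(* sub_beta_gen 1 EUnit: identity below 1, payload at 1, shift -1 above 1. *)
let _ = assert_norm (sub_beta_gen 1 EUnit 0 = EVar 0)
let _ = assert_norm (sub_beta_gen 1 EUnit 1 = EUnit)
let _ = assert_norm (sub_beta_gen 1 EUnit 2 = EVar 1)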
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.subst_closed | val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v) | val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v) | let rec subst_closed v s = subst_below 0 v s | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 44,
"end_line": 208,
"start_col": 0,
"start_line": 208
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: StlcStrongDbParSubst.exp{StlcCbvDbParSubst.closed v} -> s: StlcStrongDbParSubst.sub
-> FStar.Pervasives.Lemma (ensures v = StlcStrongDbParSubst.subst s v) (decreases v) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"StlcStrongDbParSubst.exp",
"Prims.b2t",
"StlcCbvDbParSubst.closed",
"StlcStrongDbParSubst.sub",
"StlcCbvDbParSubst.subst_below",
"Prims.unit"
] | [
"recursion"
] | false | false | true | false | false | let rec subst_closed v s =
| subst_below 0 v s | false |
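A usage sketch for `subst_closed`, again assuming the base type `TUnit`: every substitution is a no-op on the closed identity function, with `assert_norm` discharging the `closed` refinement on the argument.

(* Any substitution leaves a closed term unchanged. *)
let subst_noop_on_closed_id (s:sub)
  : Lemma (ELam TUnit (EVar 0) = subst s (ELam TUnit (EVar 0)))
  = assert_norm (closed (ELam TUnit (EVar 0)));
    subst_closed (ELam TUnit (EVar 0)) s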
OWGCounter.fst | OWGCounter.lock_inv_equiv_lemma | val lock_inv_equiv_lemma (r: ref int) (r1 r2: ghost_ref int)
: Lemma ((lock_inv r r1 r2) `equiv` (lock_inv r r2 r1)) | val lock_inv_equiv_lemma (r: ref int) (r1 r2: ghost_ref int)
: Lemma ((lock_inv r r1 r2) `equiv` (lock_inv r r2 r1)) | let lock_inv_equiv_lemma (r:ref int) (r1 r2:ghost_ref int)
: Lemma (lock_inv r r1 r2 `equiv` lock_inv r r2 r1)
=
let aux (r:ref int) (r1 r2:ghost_ref int) (m:mem)
: Lemma
(requires interp (hp_of (lock_inv r r1 r2)) m)
(ensures interp (hp_of (lock_inv r r2 r1)) m)
[SMTPat ()]
= assert (
Steel.Memory.h_exists #(int & int) (fun x -> hp_of (lock_inv_pred r r1 r2 x)) ==
h_exists_sl #(int & int) (lock_inv_pred r r1 r2))
by (FStar.Tactics.norm [delta_only [`%h_exists_sl]]);
let w : G.erased (int & int) = id_elim_exists (fun x -> hp_of (lock_inv_pred r r1 r2 x)) m in
assert ((ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w))) `equiv`
(ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)))) by (FStar.Tactics.norm [delta_attr [`%__steel_reduce__]]; canon' false (`true_p) (`true_p));
reveal_equiv
(ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)))
(ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)));
assert (interp (hp_of (lock_inv_pred r r2 r1 (snd w, fst w))) m);
intro_h_exists (snd w, fst w) (fun x -> hp_of (lock_inv_pred r r2 r1 x)) m;
assert (interp (Steel.Memory.h_exists (fun x -> hp_of (lock_inv_pred r r2 r1 x))) m);
assert (
Steel.Memory.h_exists #(int & int) (fun x -> hp_of (lock_inv_pred r r2 r1 x)) ==
h_exists_sl #(int & int) (lock_inv_pred r r2 r1))
by (FStar.Tactics.norm [delta_only [`%h_exists_sl]])
in
reveal_equiv (lock_inv r r1 r2) (lock_inv r r2 r1) | {
"file_name": "share/steel/examples/steel/OWGCounter.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 52,
"end_line": 123,
"start_col": 0,
"start_line": 83
} | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
(*
* An implementation of the parallel counter presented by Owicki and Gries
* "Verifying properties of parallel programs: An axiomatic approach.", CACM'76
*
 * In this example, the main thread forks two worker threads that both
* increment a shared counter. The goal of the example is to show that
* after both the worker threads are done, the value of the counter is
* its original value + 2.
*
* See http://pm.inf.ethz.ch/publications/getpdf.php for an implementation
* of the OWG counters in the Chalice framework.
*)
module OWGCounter
module G = FStar.Ghost
open Steel.Memory
open Steel.FractionalPermission
open Steel.Reference
open Steel.SpinLock
open Steel.Effect.Atomic
open Steel.Effect
module R = Steel.Reference
module P = Steel.FractionalPermission
module A = Steel.Effect.Atomic
#set-options "--ide_id_info_off --using_facts_from '* -FStar.Tactics -FStar.Reflection' --fuel 0 --ifuel 0"
let half_perm = half_perm full_perm
(* Some basic wrappers to avoid issues with normalization.
   TODO: The frame inference tactic should not normalize fst and snd *)
noextract
let fst = fst
noextract
let snd = snd
/// The core invariant of the Owicki-Gries counter, shared by the two parties.
/// The concrete counter [r] is shared, and the full permission is stored in the invariant.
/// Each party also has half permission to their own ghost counter [r1] or [r2], ensuring that
/// only they can modify it by retrieving the other half of the permission when accessing the invariant.
/// The `__reduce__` attribute instructs the frame inference tactic to unfold this predicate, for frame inference only
[@@ __reduce__]
let lock_inv_slprop (r:ref int) (r1 r2:ghost_ref int) (w:int & int) =
ghost_pts_to r1 half_perm (fst w) `star`
ghost_pts_to r2 half_perm (snd w) `star`
pts_to r full_perm (fst w + snd w)
[@@ __reduce__]
let lock_inv_pred (r:ref int) (r1 r2:ghost_ref int) =
fun (x:int & int) -> lock_inv_slprop r r1 r2 x
/// The actual invariant, existentially quantifying over the values currently stored in the two ghost references
[@@ __reduce__]
let lock_inv (r:ref int) (r1 r2:ghost_ref int) : vprop =
h_exists (lock_inv_pred r r1 r2)
#push-options "--warn_error -271 --fuel 1 --ifuel 1" | {
"checked_file": "/",
"dependencies": [
"Steel.SpinLock.fsti.checked",
"Steel.Reference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "OWGCounter.fst"
} | [
{
"abbrev": true,
"full_module": "Steel.Effect.Atomic",
"short_module": "A"
},
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Steel.Reference",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.SpinLock",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Reference",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
r: Steel.Reference.ref Prims.int ->
r1: Steel.Reference.ghost_ref Prims.int ->
r2: Steel.Reference.ghost_ref Prims.int
-> FStar.Pervasives.Lemma
(ensures Steel.Effect.Common.equiv (OWGCounter.lock_inv r r1 r2) (OWGCounter.lock_inv r r2 r1)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Steel.Reference.ref",
"Prims.int",
"Steel.Reference.ghost_ref",
"Steel.Effect.Common.reveal_equiv",
"OWGCounter.lock_inv",
"Steel.Memory.mem",
"Prims.unit",
"Steel.Memory.interp",
"Steel.Effect.Common.hp_of",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil",
"FStar.Tactics.Effect.assert_by_tactic",
"Prims.eq2",
"Steel.Memory.slprop",
"Steel.Memory.h_exists",
"FStar.Pervasives.Native.tuple2",
"OWGCounter.lock_inv_pred",
"Steel.Effect.Atomic.h_exists_sl",
"FStar.Stubs.Tactics.V1.Builtins.norm",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.delta_only",
"Prims.string",
"Prims._assert",
"Steel.Memory.intro_h_exists",
"FStar.Pervasives.Native.Mktuple2",
"OWGCounter.snd",
"FStar.Ghost.reveal",
"OWGCounter.fst",
"Steel.Effect.Common.star",
"Steel.Reference.ghost_pts_to",
"OWGCounter.half_perm",
"Steel.Reference.pts_to",
"Steel.FractionalPermission.full_perm",
"Prims.op_Addition",
"Steel.Effect.Common.equiv",
"Steel.Effect.Common.canon'",
"FStar.Pervasives.delta_attr",
"FStar.Ghost.erased",
"Steel.Memory.id_elim_exists",
"Prims.l_True"
] | [] | false | false | true | false | false | let lock_inv_equiv_lemma (r: ref int) (r1 r2: ghost_ref int)
: Lemma ((lock_inv r r1 r2) `equiv` (lock_inv r r2 r1)) =
| let aux (r: ref int) (r1 r2: ghost_ref int) (m: mem)
: Lemma (requires interp (hp_of (lock_inv r r1 r2)) m)
(ensures interp (hp_of (lock_inv r r2 r1)) m)
[SMTPat ()] =
FStar.Tactics.Effect.assert_by_tactic (Steel.Memory.h_exists #(int & int)
(fun x -> hp_of (lock_inv_pred r r1 r2 x)) ==
h_exists_sl #(int & int) (lock_inv_pred r r1 r2))
(fun _ ->
();
(FStar.Tactics.norm [delta_only [`%h_exists_sl]]));
let w:G.erased (int & int) = id_elim_exists (fun x -> hp_of (lock_inv_pred r r1 r2 x)) m in
FStar.Tactics.Effect.assert_by_tactic ((((ghost_pts_to r1 half_perm (snd (snd w, fst w)))
`star`
(ghost_pts_to r2 half_perm (fst (snd w, fst w))))
`star`
(pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w))))
`equiv`
(((ghost_pts_to r2 half_perm (fst (snd w, fst w)))
`star`
(ghost_pts_to r1 half_perm (snd (snd w, fst w))))
`star`
(pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)))))
(fun _ ->
();
(FStar.Tactics.norm [delta_attr [`%__steel_reduce__]];
canon' false (`true_p) (`true_p)));
reveal_equiv (((ghost_pts_to r1 half_perm (snd (snd w, fst w)))
`star`
(ghost_pts_to r2 half_perm (fst (snd w, fst w))))
`star`
(pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w))))
(((ghost_pts_to r2 half_perm (fst (snd w, fst w)))
`star`
(ghost_pts_to r1 half_perm (snd (snd w, fst w))))
`star`
(pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w))));
assert (interp (hp_of (lock_inv_pred r r2 r1 (snd w, fst w))) m);
intro_h_exists (snd w, fst w) (fun x -> hp_of (lock_inv_pred r r2 r1 x)) m;
assert (interp (Steel.Memory.h_exists (fun x -> hp_of (lock_inv_pred r r2 r1 x))) m);
FStar.Tactics.Effect.assert_by_tactic (Steel.Memory.h_exists #(int & int)
(fun x -> hp_of (lock_inv_pred r r2 r1 x)) ==
h_exists_sl #(int & int) (lock_inv_pred r r2 r1))
(fun _ ->
();
(FStar.Tactics.norm [delta_only [`%h_exists_sl]]))
in
reveal_equiv (lock_inv r r1 r2) (lock_inv r r2 r1) | false |
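The statement of `lock_inv_equiv_lemma` is symmetric in the two ghost references, so the reverse rewrite direction needs no extra proof; in Steel client code such an equivalence is typically consumed by rewriting one vprop into the other (for instance via `rewrite_slprop`), but the corollary below uses only the names already in scope.

(* Swapping the arguments yields the equivalence in the other direction. *)
let lock_inv_equiv_lemma_sym (r:ref int) (r1 r2:ghost_ref int)
  : Lemma (lock_inv r r2 r1 `equiv` lock_inv r r1 r2)
  = lock_inv_equiv_lemma r r2 r1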
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.subst_gen_elam_aux | val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y)) | val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y)) | let subst_gen_elam_aux x v y =
if y = 0 then ()
else
(assert((sub_elam (sub_beta_gen x v)) y =
(subst sub_inc (sub_beta_gen x v (y-1))));
if y-1 < x then ()
else if y-1 = x then
(assert(sub_beta_gen x v (y-1) = v);
assert(sub_beta_gen (x+1) v y = v);
subst_closed v sub_inc)
else ()) | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 13,
"end_line": 223,
"start_col": 0,
"start_line": 213
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
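(* Worked instance, for illustration: by the definition just above,
   sub_beta_gen 1 v maps 0 |-> EVar 0 (left alone), 1 |-> v (substituted),
   and any y > 1 |-> EVar (y-1) (shifted down past the substituted binder). *)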
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_closed v s = subst_below 0 v s
val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y = | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x: StlcStrongDbParSubst.var ->
v: StlcStrongDbParSubst.exp{StlcCbvDbParSubst.closed v} ->
y: StlcStrongDbParSubst.var
-> FStar.Pervasives.Lemma
(ensures
StlcStrongDbParSubst.sub_elam (StlcCbvDbParSubst.sub_beta_gen x v) y =
StlcCbvDbParSubst.sub_beta_gen (x + 1) v y) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"Prims.b2t",
"StlcCbvDbParSubst.closed",
"Prims.op_Equality",
"Prims.int",
"Prims.bool",
"Prims.op_LessThan",
"Prims.op_Subtraction",
"StlcCbvDbParSubst.subst_closed",
"StlcStrongDbParSubst.sub_inc",
"Prims.unit",
"Prims._assert",
"StlcCbvDbParSubst.sub_beta_gen",
"Prims.op_Addition",
"StlcStrongDbParSubst.sub_elam",
"StlcStrongDbParSubst.subst"
] | [] | false | false | true | false | false | let subst_gen_elam_aux x v y =
| if y = 0
then ()
else
(assert ((sub_elam (sub_beta_gen x v)) y = (subst sub_inc (sub_beta_gen x v (y - 1))));
if y - 1 < x
then ()
else
if y - 1 = x
then
(assert (sub_beta_gen x v (y - 1) = v);
assert (sub_beta_gen (x + 1) v y = v);
subst_closed v sub_inc)) | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.subst_gen_elam | val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') =
ELam t_y (subst (sub_beta_gen (x+1) v) e'))) | val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') =
ELam t_y (subst (sub_beta_gen (x+1) v) e'))) | let subst_gen_elam x v t_y e' =
subst_gen_elam_aux_forall x v;
subst_extensional (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v) e';
assert(subst (sub_beta_gen x v) (ELam t_y e')
= ELam t_y (subst (sub_elam (sub_beta_gen x v)) e')) | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 63,
"end_line": 239,
"start_col": 0,
"start_line": 234
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
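(* Worked instance, for illustration: by the definition just above,
   extend_gen 2 t g looks up 0 and 1 in g unchanged, returns Some t at 2,
   and looks up g (y-1) for any y > 2 -- the new binding sits at position 2
   and the bindings above it appear shifted up by one. *)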
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
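(* Illustrative reduction, assuming EUnit and ELam-headed terms are the values
   of StlcStrongDbParSubst: for any t, step (EApp (ELam t (EVar 0)) EUnit)
   takes the beta branch above and yields Some (subst (sub_beta EUnit) (EVar 0)),
   i.e. Some EUnit, while step EUnit = None since values do not step. *)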
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_closed v s = subst_below 0 v s
val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y))
let subst_gen_elam_aux x v y =
if y = 0 then ()
else
(assert((sub_elam (sub_beta_gen x v)) y =
(subst sub_inc (sub_beta_gen x v (y-1))));
if y-1 < x then ()
else if y-1 = x then
(assert(sub_beta_gen x v (y-1) = v);
assert(sub_beta_gen (x+1) v y = v);
subst_closed v sub_inc)
else ())
val subst_gen_elam_aux_forall : x:var -> v:exp{closed v} -> Lemma
(ensures (feq (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v)))
let subst_gen_elam_aux_forall x v = admit()
(* should follow from subst_gen_elam_aux and forall_intro *)
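(* Unverified sketch: the admit above could plausibly be discharged by
   introducing the pointwise lemma with FStar.Classical.forall_intro, since
   feq is a pointwise equality:
let subst_gen_elam_aux_forall x v =
  forall_intro (subst_gen_elam_aux x v)
*)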
val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') = | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x: StlcStrongDbParSubst.var ->
v: StlcStrongDbParSubst.exp{StlcCbvDbParSubst.closed v} ->
t_y: StlcStrongDbParSubst.typ ->
e': StlcStrongDbParSubst.exp
-> FStar.Pervasives.Lemma
(ensures
StlcStrongDbParSubst.subst (StlcCbvDbParSubst.sub_beta_gen x v)
(StlcStrongDbParSubst.ELam t_y e') =
StlcStrongDbParSubst.ELam t_y
(StlcStrongDbParSubst.subst (StlcCbvDbParSubst.sub_beta_gen (x + 1) v) e')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"Prims.b2t",
"StlcCbvDbParSubst.closed",
"StlcStrongDbParSubst.typ",
"Prims._assert",
"Prims.op_Equality",
"StlcStrongDbParSubst.subst",
"StlcCbvDbParSubst.sub_beta_gen",
"StlcStrongDbParSubst.ELam",
"StlcStrongDbParSubst.sub_elam",
"Prims.unit",
"StlcStrongDbParSubst.subst_extensional",
"Prims.op_Addition",
"StlcCbvDbParSubst.subst_gen_elam_aux_forall"
] | [] | true | false | true | false | false | let subst_gen_elam x v t_y e' =
| subst_gen_elam_aux_forall x v;
subst_extensional (sub_elam (sub_beta_gen x v)) (sub_beta_gen (x + 1) v) e';
assert (subst (sub_beta_gen x v) (ELam t_y e') = ELam t_y (subst (sub_elam (sub_beta_gen x v)) e')) | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.extend_gen_0 | val extend_gen_0 : t:typ -> g:env ->
Lemma (feq (extend_gen 0 t g) (extend t g)) | val extend_gen_0 : t:typ -> g:env ->
Lemma (feq (extend_gen 0 t g) (extend t g)) | let extend_gen_0 t g =
forall_intro (extend_gen_0_aux t g) | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 37,
"end_line": 275,
"start_col": 0,
"start_line": 274
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_closed v s = subst_below 0 v s
val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y))
let subst_gen_elam_aux x v y =
if y = 0 then ()
else
(assert((sub_elam (sub_beta_gen x v)) y =
(subst sub_inc (sub_beta_gen x v (y-1))));
if y-1 < x then ()
else if y-1 = x then
(assert(sub_beta_gen x v (y-1) = v);
assert(sub_beta_gen (x+1) v y = v);
subst_closed v sub_inc)
else ())
val subst_gen_elam_aux_forall : x:var -> v:exp{closed v} -> Lemma
(ensures (feq (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v)))
let subst_gen_elam_aux_forall x v = admit()
(* should follow from subst_gen_elam_aux and forall_intro *)
val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') =
ELam t_y (subst (sub_beta_gen (x+1) v) e')))
let subst_gen_elam x v t_y e' =
subst_gen_elam_aux_forall x v;
subst_extensional (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v) e';
assert(subst (sub_beta_gen x v) (ELam t_y e')
= ELam t_y (subst (sub_elam (sub_beta_gen x v)) e'))
val substitution_preserves_typing :
x:var -> #e:exp -> #v:exp -> #t_x:typ -> #t:typ -> #g:env ->
$h1:typing empty v t_x ->
$h2:typing (extend_gen x t_x g) e t ->
Tot (typing g (subst (sub_beta_gen x v) e) t) (decreases e)
let rec substitution_preserves_typing x #e #v #t_x #t #g h1 h2 =
match h2 with
| TyVar y ->
if x=y then (typable_empty_closed h1;
closed_appears_free v;
context_invariance h1 g)
else if y<x then context_invariance h2 g
else TyVar (y-1)
| TyLam #_ t_y #e' #t' h21 ->
let h21' = typing_extensional h21 (extend_gen (x+1) t_x (extend t_y g)) in
typable_empty_closed h1;
subst_gen_elam x v t_y e';
let h21' : (r:typing (extend_gen (x+1) t_x (extend t_y g)) e' t'{e' << e}) =
h21' in
TyLam t_y (substitution_preserves_typing (x+1) h1 h21')
| TyApp #_ #e1 #e2 #t11 #t12 h21 h22 ->
let h21 : (r:typing (extend_gen x t_x g) e1 (TArr t11 t12){e1 << e}) = h21 in
let h22 : (r:typing (extend_gen x t_x g) e2 t11{e2 << e}) = h22 in
(TyApp (substitution_preserves_typing x h1 h21)
(substitution_preserves_typing x h1 h22))
| TyUnit -> TyUnit
val extend_gen_0_aux : t:typ -> g:env -> y:var ->
Lemma (extend_gen 0 t g y = extend t g y)
let extend_gen_0_aux t g y = ()
val extend_gen_0 : t:typ -> g:env -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: StlcStrongDbParSubst.typ -> g: StlcStrongDbParSubst.env
-> FStar.Pervasives.Lemma
(ensures
FStar.FunctionalExtensionality.feq (StlcCbvDbParSubst.extend_gen 0 t g)
(StlcStrongDbParSubst.extend t g)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.env",
"FStar.Classical.forall_intro",
"StlcStrongDbParSubst.var",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Pervasives.Native.option",
"StlcCbvDbParSubst.extend_gen",
"StlcStrongDbParSubst.extend",
"StlcCbvDbParSubst.extend_gen_0_aux",
"Prims.unit"
] | [] | false | false | true | false | false | let extend_gen_0 t g =
| forall_intro (extend_gen_0_aux t g) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_constant_size_parser_precond_precond | val make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
(s1: bytes{Seq.length s1 == sz})
(s2: bytes{Seq.length s2 == sz})
: GTot Type0 | val make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
(s1: bytes{Seq.length s1 == sz})
(s2: bytes{Seq.length s2 == sz})
: GTot Type0 | let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 48,
"end_line": 38,
"start_col": 0,
"start_line": 31
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f:
(s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> Prims.GTot (FStar.Pervasives.Native.option t)) ->
s1: LowParse.Bytes.bytes{FStar.Seq.Base.length s1 == sz} ->
s2: LowParse.Bytes.bytes{FStar.Seq.Base.length s2 == sz}
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.l_and",
"Prims.l_or",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some"
] | [] | false | false | false | false | true | let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
(s1: bytes{Seq.length s1 == sz})
(s2: bytes{Seq.length s2 == sz})
: GTot Type0 =
| (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.substitution_preserves_typing | val substitution_preserves_typing :
x:var -> #e:exp -> #v:exp -> #t_x:typ -> #t:typ -> #g:env ->
$h1:typing empty v t_x ->
$h2:typing (extend_gen x t_x g) e t ->
Tot (typing g (subst (sub_beta_gen x v) e) t) (decreases e) | val substitution_preserves_typing :
x:var -> #e:exp -> #v:exp -> #t_x:typ -> #t:typ -> #g:env ->
$h1:typing empty v t_x ->
$h2:typing (extend_gen x t_x g) e t ->
Tot (typing g (subst (sub_beta_gen x v) e) t) (decreases e) | let rec substitution_preserves_typing x #e #v #t_x #t #g h1 h2 =
match h2 with
| TyVar y ->
if x=y then (typable_empty_closed h1;
closed_appears_free v;
context_invariance h1 g)
else if y<x then context_invariance h2 g
else TyVar (y-1)
| TyLam #_ t_y #e' #t' h21 ->
let h21' = typing_extensional h21 (extend_gen (x+1) t_x (extend t_y g)) in
typable_empty_closed h1;
subst_gen_elam x v t_y e';
let h21' : (r:typing (extend_gen (x+1) t_x (extend t_y g)) e' t'{e' << e}) =
h21' in
TyLam t_y (substitution_preserves_typing (x+1) h1 h21')
| TyApp #_ #e1 #e2 #t11 #t12 h21 h22 ->
let h21 : (r:typing (extend_gen x t_x g) e1 (TArr t11 t12){e1 << e}) = h21 in
let h22 : (r:typing (extend_gen x t_x g) e2 t11{e2 << e}) = h22 in
(TyApp (substitution_preserves_typing x h1 h21)
(substitution_preserves_typing x h1 h22))
| TyUnit -> TyUnit | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 20,
"end_line": 266,
"start_col": 0,
"start_line": 246
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_closed v s = subst_below 0 v s
val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y))
let subst_gen_elam_aux x v y =
if y = 0 then ()
else
(assert((sub_elam (sub_beta_gen x v)) y =
(subst sub_inc (sub_beta_gen x v (y-1))));
if y-1 < x then ()
else if y-1 = x then
(assert(sub_beta_gen x v (y-1) = v);
assert(sub_beta_gen (x+1) v y = v);
subst_closed v sub_inc)
else ())
val subst_gen_elam_aux_forall : x:var -> v:exp{closed v} -> Lemma
(ensures (feq (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v)))
let subst_gen_elam_aux_forall x v = admit()
(* should follow from subst_gen_elam_aux and forall_intro *)
val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') =
ELam t_y (subst (sub_beta_gen (x+1) v) e')))
let subst_gen_elam x v t_y e' =
subst_gen_elam_aux_forall x v;
subst_extensional (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v) e';
assert(subst (sub_beta_gen x v) (ELam t_y e')
= ELam t_y (subst (sub_elam (sub_beta_gen x v)) e'))
val substitution_preserves_typing :
x:var -> #e:exp -> #v:exp -> #t_x:typ -> #t:typ -> #g:env ->
$h1:typing empty v t_x ->
$h2:typing (extend_gen x t_x g) e t -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x: StlcStrongDbParSubst.var ->
$h1: StlcStrongDbParSubst.typing StlcStrongDbParSubst.empty v t_x ->
$h2: StlcStrongDbParSubst.typing (StlcCbvDbParSubst.extend_gen x t_x g) e t
-> Prims.Tot
(StlcStrongDbParSubst.typing g
(StlcStrongDbParSubst.subst (StlcCbvDbParSubst.sub_beta_gen x v) e)
t) | Prims.Tot | [
"total",
""
] | [] | [
"StlcStrongDbParSubst.var",
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.typing",
"StlcStrongDbParSubst.empty",
"StlcCbvDbParSubst.extend_gen",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"Prims.op_Equality",
"StlcCbvDbParSubst.context_invariance",
"Prims.unit",
"StlcCbvDbParSubst.closed_appears_free",
"StlcCbvDbParSubst.typable_empty_closed",
"Prims.bool",
"Prims.op_LessThan",
"StlcStrongDbParSubst.TyVar",
"Prims.op_Subtraction",
"StlcStrongDbParSubst.subst",
"StlcCbvDbParSubst.sub_beta_gen",
"StlcStrongDbParSubst.extend",
"StlcStrongDbParSubst.TyLam",
"Prims.op_Addition",
"StlcCbvDbParSubst.substitution_preserves_typing",
"Prims.precedes",
"StlcCbvDbParSubst.subst_gen_elam",
"StlcCbvDbParSubst.typing_extensional",
"StlcStrongDbParSubst.TArr",
"StlcStrongDbParSubst.TyApp",
"StlcStrongDbParSubst.TyUnit"
] | [
"recursion"
] | false | false | false | false | false | let rec substitution_preserves_typing x #e #v #t_x #t #g h1 h2 =
| match h2 with
| TyVar y ->
if x = y
then
(typable_empty_closed h1;
closed_appears_free v;
context_invariance h1 g)
else if y < x then context_invariance h2 g else TyVar (y - 1)
| TyLam #_ t_y #e' #t' h21 ->
let h21' = typing_extensional h21 (extend_gen (x + 1) t_x (extend t_y g)) in
typable_empty_closed h1;
subst_gen_elam x v t_y e';
let h21':(r: typing (extend_gen (x + 1) t_x (extend t_y g)) e' t' {e' << e}) = h21' in
TyLam t_y (substitution_preserves_typing (x + 1) h1 h21')
| TyApp #_ #e1 #e2 #t11 #t12 h21 h22 ->
let h21:(r: typing (extend_gen x t_x g) e1 (TArr t11 t12) {e1 << e}) = h21 in
let h22:(r: typing (extend_gen x t_x g) e2 t11 {e2 << e}) = h22 in
(TyApp (substitution_preserves_typing x h1 h21) (substitution_preserves_typing x h1 h22))
| TyUnit -> TyUnit | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_constant_size_parser_precond | val make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: GTot Type0 | val make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: GTot Type0 | let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 78,
"end_line": 46,
"start_col": 0,
"start_line": 40
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f:
(s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> Prims.GTot (FStar.Pervasives.Native.option t))
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.l_Forall",
"Prims.l_imp",
"LowParse.Spec.Combinators.make_constant_size_parser_precond_precond",
"FStar.Seq.Base.equal"
] | [] | false | false | false | false | true | let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: GTot Type0 =
| forall (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}). {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 | false |
StlcCbvDbParSubst.fst | StlcCbvDbParSubst.preservation | val preservation : #e:exp -> #t:typ -> h:typing empty e t{Some? (step e)} ->
Tot (typing empty (Some?.v (step e)) t) (decreases e) | val preservation : #e:exp -> #t:typ -> h:typing empty e t{Some? (step e)} ->
Tot (typing empty (Some?.v (step e)) t) (decreases e) | let rec preservation #e #t h =
let TyApp #g #e1 #e2 #t11 #t12 h1 h2 = h in
if is_value e1
then (if is_value e2
then let TyLam t_x hbody = h1 in
(extend_gen_0 t_x empty;
substitution_preserves_typing 0 h2 (extend_gen_typing_conversion hbody))
else TyApp h1 (preservation h2))
else TyApp (preservation h1) h2 | {
"file_name": "examples/metatheory/StlcCbvDbParSubst.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 290,
"start_col": 0,
"start_line": 282
} | (*
Copyright 2008-2014 Catalin Hritcu, Nikhil Swamy, Microsoft Research and Inria
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module StlcCbvDbParSubst
(* Constructive style progress and preservation proof for STLC with
CBV reduction, using deBruijn indices and parallel substitution.
An awkward special case of stlc_strong...; in fact this proof
is _more_ complex than the one in stlc_strong...! *)
open FStar.Classical
open FStar.FunctionalExtensionality
open StlcStrongDbParSubst
(* Weakening (or shifting preserves typing) *)
(* Useless now, showing that it follows from substitution lemma *)
val sub_inc_above : nat -> var -> Tot exp
let sub_inc_above n y = if y<n then EVar y else EVar (y+1)
val shift_up_above : nat -> exp -> Tot exp
let shift_up_above n e = subst (sub_inc_above n) e
val extend_gen : var -> typ -> env -> Tot env
let extend_gen x t g = if x = 0 then extend t g
else (fun y -> if y < x then g y
else if y = x then Some t
else g (y-1))
irreducible val weakening : n:nat -> #g:env -> #e:exp -> #t:typ -> t':typ ->
h:typing g e t -> Tot (typing (extend_gen n t' g) (shift_up_above n e) t)
(decreases h)
let rec weakening n #g #v #t t' h =
let hs : subst_typing (sub_inc_above n) g (extend_gen n t' g) =
fun y -> if y < n then TyVar y else TyVar (y+1)
in substitution (sub_inc_above n) h hs
val step : exp -> Tot (option exp)
let rec step e =
match e with
| EApp e1 e2 ->
if is_value e1 then
if is_value e2 then
match e1 with
| ELam t e' -> Some (subst (sub_beta e2) e')
| _ -> None
else
match (step e2) with
| Some e2' -> Some (EApp e1 e2')
| None -> None
else
(match (step e1) with
| Some e1' -> Some (EApp e1' e2)
| None -> None)
| _ -> None
val progress : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (is_value e \/ (Some? (step e)))) (decreases h)
let rec progress #e #t h =
if TyApp? h then let TyApp h1 h2 = h in progress h1; progress h2
(* Typing extensional (weaker) and context invariance (stronger) lemmas *)
(* Typing extensional follows directly from functional extensionality
(it's also a special case of context invariance below) *)
irreducible val typing_extensional : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{feq g g'} -> Tot (typing g' e t) (decreases h)
let rec typing_extensional #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t h -> TyLam t (typing_extensional h (extend t g'))
| TyApp h1 h2 -> TyApp (typing_extensional h1 g') (typing_extensional h2 g')
| TyUnit -> TyUnit
val appears_free_in : x:var -> e:exp -> Tot bool (decreases e)
let rec appears_free_in x e =
match e with
| EVar y -> x = y
| EApp e1 e2 -> appears_free_in x e1 || appears_free_in x e2
| ELam _ e1 -> appears_free_in (x+1) e1
| EUnit -> false
type envEqualE (e:exp) (g1:env) (g2:env) =
(forall (x:var). appears_free_in x e ==> g1 x = g2 x)
(* Context invariance (actually used in a single place within substitution,
   for a specific form of weakening when typing variables) *)
val context_invariance : #e:exp -> #g:env -> #t:typ ->
h:(typing g e t) -> g':env{envEqualE e g g'} ->
Tot (typing g' e t) (decreases h)
let rec context_invariance #e #g #t h g' =
match h with
| TyVar x -> TyVar x
| TyLam t_y h1 ->
TyLam t_y (context_invariance h1 (extend t_y g'))
| TyApp h1 h2 ->
TyApp (context_invariance h1 g') (context_invariance h2 g')
| TyUnit -> TyUnit
val free_in_context : x:var -> #e:exp -> #g:env -> #t:typ -> h:typing g e t ->
Lemma (requires True) (ensures (appears_free_in x e ==> Some? (g x))) (decreases h)
let rec free_in_context x #e #g #t h =
match h with
| TyVar x -> ()
| TyLam t h1 -> free_in_context (x+1) h1
| TyApp h1 h2 -> free_in_context x h1; free_in_context x h2
| TyUnit -> ()
val typable_empty_not_free : x:var -> #e:exp -> #t:typ -> typing empty e t ->
Lemma (ensures (not (appears_free_in x e)))
(* [SMTPat (appears_free_in x e)] -- CH: adding this makes it fail! *)
let typable_empty_not_free x #e #t h = free_in_context x h
val below : x:var -> e:exp -> Tot bool (decreases e)
let rec below x e =
match e with
| EVar y -> y < x
| EApp e1 e2 -> below x e1 && below x e2
| ELam _ e1 -> below (x+1) e1
| EUnit -> true
val closed : exp -> Tot bool
let closed e = below 0 e
(* at some point we could try again to relate closed and appears_free *)
(* this didn't work for some reason
forall_intro #var #(fun (x:var) -> not (appears_free_in x e))
(fun (x:var) -> typable_empty_closed x h)
*)
type pclosed (e:exp) = (forall (x:var). not (appears_free_in x e))
assume val closed_appears_free : e:exp{closed e} -> Lemma (ensures (pclosed e))
assume val appears_free_closed : e:exp{pclosed e} -> Lemma (ensures (closed e))
(*
let rec appears_free_closed e =
match e with
| EVar _ -> ()
| EApp e1 e2 -> appears_free_closed e1; appears_free_closed e2
| ELam _ e1 -> appears_free_closed e1
*)
type below_env (x:var) (g:env) = (forall (y:var). y >= x ==> g y = None)
val typable_below : x:var -> #g:env -> #e:exp -> #t:typ
-> h:typing g e t{below_env x g} ->
Lemma (requires True) (ensures (below x e)) (decreases h)
let rec typable_below x #g #e #t h =
match h with
| TyVar y -> ()
| TyApp h1 h2 -> typable_below x h1; typable_below x h2
| TyLam _y h1 -> typable_below (x+1) h1
| TyUnit -> ()
val typable_empty_closed : #e:exp -> #t:typ -> h:typing empty e t ->
Lemma (ensures (closed e))
let typable_empty_closed #e #t h = typable_below 0 h
val sub_beta_gen : var -> exp -> Tot sub
let sub_beta_gen x v = fun y -> if y < x then (EVar y)
else if y = x then v (* substitute *)
else (EVar (y-1)) (* shift -1 *)
val subst_gen_var_lt : x:var -> y:var{y < x} -> v:exp -> Lemma
(ensures (subst (sub_beta_gen x v) (EVar y) = (EVar y)))
let subst_gen_var_lt x y v = ()
val extend_lt : x:var -> y:var{y < x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g y)
let extend_lt x y g t_x = ()
val extend_gt : x:var -> y:var{y > x} -> g:env -> t_x:typ -> Lemma
(ensures (extend_gen x t_x g) y = g (y-1))
let extend_gt x y g t_x = ()
val extend_twice : x:var -> g:env -> t_x:typ -> t_y:typ -> Lemma
(ensures (feq (extend_gen 0 t_y (extend_gen x t_x g) )
(extend_gen (x+1) t_x (extend_gen 0 t_y g))))
let extend_twice x g t_x t_y = ()
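(* Note: extend_twice is the environment counterpart of shifting a
   substitution under a binder: pushing a binding at index 0 on top of a
   binding at index x is pointwise the same as binding at index x+1 after
   pushing at index 0. *)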
type sub_below (x:var) (s:sub) = (forall (y:var). y<x ==> s y = EVar y)
val subst_below : x:var -> v:exp{below x v} -> s:sub{sub_below x s} ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_below x v s =
match v with
| EVar y -> ()
| EApp e1 e2 -> subst_below x e1 s; subst_below x e2 s
| ELam t e -> (subst_below (x+1) e (sub_elam s);
assert(e = subst (sub_elam s) e);
assert(v = ELam t e);
assert(subst s v = ELam t (subst (sub_elam s) e)))
| EUnit -> ()
val subst_closed : v:exp{closed v} -> s:sub ->
Lemma (requires True) (ensures (v = subst s v)) (decreases v)
let rec subst_closed v s = subst_below 0 v s
val subst_gen_elam_aux : x:var -> v:exp{closed v} -> y:var -> Lemma
(ensures ((sub_elam (sub_beta_gen x v)) y =
(sub_beta_gen (x+1) v) y))
let subst_gen_elam_aux x v y =
if y = 0 then ()
else
(assert((sub_elam (sub_beta_gen x v)) y =
(subst sub_inc (sub_beta_gen x v (y-1))));
if y-1 < x then ()
else if y-1 = x then
(assert(sub_beta_gen x v (y-1) = v);
assert(sub_beta_gen (x+1) v y = v);
subst_closed v sub_inc)
else ())
val subst_gen_elam_aux_forall : x:var -> v:exp{closed v} -> Lemma
(ensures (feq (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v)))
let subst_gen_elam_aux_forall x v = admit()
(* should follow from subst_gen_elam_aux and forall_intro *)
val subst_gen_elam : x:var -> v:exp{closed v} -> t_y:typ -> e':exp -> Lemma
(ensures (subst (sub_beta_gen x v) (ELam t_y e') =
ELam t_y (subst (sub_beta_gen (x+1) v) e')))
let subst_gen_elam x v t_y e' =
subst_gen_elam_aux_forall x v;
subst_extensional (sub_elam (sub_beta_gen x v))
(sub_beta_gen (x+1) v) e';
assert(subst (sub_beta_gen x v) (ELam t_y e')
= ELam t_y (subst (sub_elam (sub_beta_gen x v)) e'))
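(* Note: subst_gen_elam commutes the generalized beta substitution with a
   lambda: substituting for x in (ELam t_y e') is substituting for x+1 in the
   body.  This relies on v being closed, so that v is unaffected by the extra
   binder (see subst_closed above). *)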
val substitution_preserves_typing :
x:var -> #e:exp -> #v:exp -> #t_x:typ -> #t:typ -> #g:env ->
$h1:typing empty v t_x ->
$h2:typing (extend_gen x t_x g) e t ->
Tot (typing g (subst (sub_beta_gen x v) e) t) (decreases e)
let rec substitution_preserves_typing x #e #v #t_x #t #g h1 h2 =
match h2 with
| TyVar y ->
if x=y then (typable_empty_closed h1;
closed_appears_free v;
context_invariance h1 g)
else if y<x then context_invariance h2 g
else TyVar (y-1)
| TyLam #_ t_y #e' #t' h21 ->
let h21' = typing_extensional h21 (extend_gen (x+1) t_x (extend t_y g)) in
typable_empty_closed h1;
subst_gen_elam x v t_y e';
let h21' : (r:typing (extend_gen (x+1) t_x (extend t_y g)) e' t'{e' << e}) =
h21' in
TyLam t_y (substitution_preserves_typing (x+1) h1 h21')
| TyApp #_ #e1 #e2 #t11 #t12 h21 h22 ->
let h21 : (r:typing (extend_gen x t_x g) e1 (TArr t11 t12){e1 << e}) = h21 in
let h22 : (r:typing (extend_gen x t_x g) e2 t11{e2 << e}) = h22 in
(TyApp (substitution_preserves_typing x h1 h21)
(substitution_preserves_typing x h1 h22))
| TyUnit -> TyUnit
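(* Note on the three cases above: for a variable, either it is exactly x and
   the closed typing of v is reused in the target environment g (justified by
   typable_empty_closed, closed_appears_free and context_invariance), or it
   is below x and left unchanged, or above x and shifted down by one.  For a
   lambda, the binding for x is re-indexed to x+1 under the new binder
   (typing_extensional with extend_gen (x+1)) and the substitution is pushed
   under the binder with subst_gen_elam before recursing.  For an
   application, both premises are handled recursively. *)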
val extend_gen_0_aux : t:typ -> g:env -> y:var ->
Lemma (extend_gen 0 t g y = extend t g y)
let extend_gen_0_aux t g y = ()
val extend_gen_0 : t:typ -> g:env ->
Lemma (feq (extend_gen 0 t g) (extend t g))
let extend_gen_0 t g =
forall_intro (extend_gen_0_aux t g)
let rec extend_gen_typing_conversion (#t:typ) (#g:env) (#e0:exp) (#t0:typ) (h:typing (extend t g) e0 t0)
:Tot (typing (extend_gen 0 t g) e0 t0) = h
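(* Note: extend_gen_0 and extend_gen_typing_conversion let a derivation in
   (extend t g) be read as one in (extend_gen 0 t g), which is the shape
   expected by substitution_preserves_typing; this is exactly what
   preservation needs at a beta-reduction step. *)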
val preservation : #e:exp -> #t:typ -> h:typing empty e t{Some? (step e)} -> | {
"checked_file": "/",
"dependencies": [
"StlcStrongDbParSubst.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "StlcCbvDbParSubst.fst"
} | [
{
"abbrev": false,
"full_module": "StlcStrongDbParSubst",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.FunctionalExtensionality",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Classical",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: StlcStrongDbParSubst.typing StlcStrongDbParSubst.empty e t {Some? (StlcCbvDbParSubst.step e)}
-> Prims.Tot
(StlcStrongDbParSubst.typing StlcStrongDbParSubst.empty (Some?.v (StlcCbvDbParSubst.step e)) t) | Prims.Tot | [
"total",
""
] | [] | [
"StlcStrongDbParSubst.exp",
"StlcStrongDbParSubst.typ",
"StlcStrongDbParSubst.typing",
"StlcStrongDbParSubst.empty",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"StlcCbvDbParSubst.step",
"StlcStrongDbParSubst.env",
"StlcStrongDbParSubst.TArr",
"StlcStrongDbParSubst.is_value",
"StlcStrongDbParSubst.extend",
"StlcCbvDbParSubst.substitution_preserves_typing",
"StlcCbvDbParSubst.extend_gen_typing_conversion",
"Prims.unit",
"StlcCbvDbParSubst.extend_gen_0",
"FStar.Pervasives.Native.__proj__Some__item__v",
"Prims.bool",
"StlcStrongDbParSubst.TyApp",
"StlcCbvDbParSubst.preservation"
] | [
"recursion"
] | false | false | false | false | false | let rec preservation #e #t h =
| let TyApp #g #e1 #e2 #t11 #t12 h1 h2 = h in
if is_value e1
then
(if is_value e2
then
let TyLam t_x hbody = h1 in
(extend_gen_0 t_x empty;
substitution_preserves_typing 0 h2 (extend_gen_typing_conversion hbody))
else TyApp h1 (preservation h2))
else TyApp (preservation h1) h2 | false |
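(* Note on the completed preservation proof above: since e takes a step it
   must be an application.  If both subterms are values, the step is a beta
   reduction and typing is preserved by substitution_preserves_typing at
   index 0 (via extend_gen_0); otherwise the proof recurses into the subterm
   that steps (e2 when e1 is a value, e1 otherwise). *)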
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_total_constant_size_parser_precond | val make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot t))
: GTot Type0 | val make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot t))
: GTot Type0 | let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 34,
"end_line": 149,
"start_col": 0,
"start_line": 143
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
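(* Note: this bare parser fails (None) on inputs shorter than sz bytes;
   otherwise it applies f to exactly the first sz bytes and, when f succeeds,
   reports sz bytes consumed.  It never inspects anything past that prefix,
   which is what justifies the constant-size parser kind below. *)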
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
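(* Note: the precondition says that f is injective on the sz-byte inputs it
   accepts: whenever it succeeds on two inputs with the same result, the two
   inputs must be equal.  This is precisely what the injectivity lemma below
   consumes. *)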
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
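(* A minimal usage sketch (hypothetical, not part of this file): a one-byte
   parser could be obtained as
     make_constant_size_parser 1 U8.t (fun s -> Some (Seq.index s 0))
   whose precondition holds because two length-1 inputs with the same first
   byte are equal as sequences. *)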
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> Prims.GTot t)
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"Prims.l_Forall",
"Prims.l_imp",
"FStar.Seq.Base.equal"
] | [] | false | false | false | false | true | let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot t))
: GTot Type0 =
| forall (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}). {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2 | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_constant_size_parser_aux | val make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Tot (bare_parser t) | val make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Tot (bare_parser t) | let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 5,
"end_line": 29,
"start_col": 0,
"start_line": 14
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f:
(s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> Prims.GTot (FStar.Pervasives.Native.option t))
-> LowParse.Spec.Base.bare_parser t | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.op_LessThan",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"Prims.bool",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Seq.Base.slice",
"LowParse.Spec.Base.bare_parser"
] | [] | false | false | false | false | false | let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Tot (bare_parser t) =
| fun (s: bytes) ->
if Seq.length s < sz
then None
else
let s':bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let sz:consumed_length s = sz in
Some (v, sz) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_ret_kind | val parse_ret_kind:parser_kind | val parse_ret_kind:parser_kind | let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 55,
"end_line": 195,
"start_col": 0,
"start_line": 194
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
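(* Note: parse_ret' always succeeds, returns v, and consumes zero bytes of
   input; parse_ret_kind (strong_parser_kind 0 0 with total metadata) records
   exactly that. *)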
// unfold | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.strong_parser_kind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.ParserKindMetadataTotal"
] | [] | false | false | false | true | false | let parse_ret_kind:parser_kind =
| strong_parser_kind 0 0 (Some ParserKindMetadataTotal) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_empty | val parse_empty:parser parse_ret_kind unit | val parse_empty:parser parse_ret_kind unit | let parse_empty : parser parse_ret_kind unit =
parse_ret () | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 14,
"end_line": 215,
"start_col": 0,
"start_line": 214
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
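(* Note: parse_ret v ignores its input and consumes nothing, so the
   corresponding serializer below can only emit the empty byte sequence; the
   v_unique hypothesis of serialize_ret guarantees that v is the only value
   it ever has to serialize. *)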
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_ret_kind Prims.unit | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.parse_ret",
"Prims.unit"
] | [] | false | false | false | true | false | let parse_empty:parser parse_ret_kind unit =
| parse_ret () | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_ret | val parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t) | val parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t) | let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 17,
"end_line": 202,
"start_col": 0,
"start_line": 201
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: t -> LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_ret_kind t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.tot_parse_ret",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_ret_kind"
] | [] | false | false | false | true | false | let parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t) =
| tot_parse_ret v | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_constant_size_parser_injective | val make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Lemma (requires (make_constant_size_parser_precond sz t f))
(ensures (injective (make_constant_size_parser_aux sz t f))) | val make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Lemma (requires (make_constant_size_parser_precond sz t f))
(ensures (injective (make_constant_size_parser_aux sz t f))) | let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 81,
"end_line": 83,
"start_col": 0,
"start_line": 56
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
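(* Note: the primed variant below differs only in concluding propositional
   equality s1 == s2 instead of Seq.equal s1 s2; the two coincide for
   sequences by extensionality, and it is the primed form that the
   injectivity proof establishes. *)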
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f:
(s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> Prims.GTot (FStar.Pervasives.Native.option t))
-> FStar.Pervasives.Lemma
(requires LowParse.Spec.Combinators.make_constant_size_parser_precond sz t f)
(ensures
LowParse.Spec.Base.injective (LowParse.Spec.Combinators.make_constant_size_parser_aux sz t f
)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"FStar.Classical.forall_intro_2",
"Prims.l_imp",
"LowParse.Spec.Base.injective_precond",
"LowParse.Spec.Base.injective_postcond",
"FStar.Classical.move_requires",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Base.consumed_length",
"Prims._assert",
"LowParse.Spec.Combinators.make_constant_size_parser_precond'",
"LowParse.Spec.Combinators.make_constant_size_parser_precond_precond",
"FStar.Seq.Base.slice",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.parse",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"LowParse.Spec.Base.bare_parser",
"LowParse.Spec.Combinators.make_constant_size_parser_aux",
"LowParse.Spec.Combinators.make_constant_size_parser_precond",
"LowParse.Spec.Base.injective"
] | [] | false | false | true | false | false | let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Lemma (requires (make_constant_size_parser_precond sz t f))
(ensures (injective (make_constant_size_parser_aux sz t f))) =
| let p:bare_parser t = make_constant_size_parser_aux sz t f in
let prf1 (b1 b2: bytes)
: Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) =
assert (Some? (parse p b1));
assert (Some? (parse p b2));
let Some (v1, len1) = parse p b1 in
let Some (v2, len2) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz
t
f
(Seq.slice b1 0 len1)
(Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.constant_size_parser_kind | val constant_size_parser_kind (sz: nat) : Tot parser_kind | val constant_size_parser_kind (sz: nat) : Tot parser_kind | let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 31,
"end_line": 88,
"start_col": 0,
"start_line": 85
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
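(* Note: the proof that follows shows that if two inputs parse successfully
   to the same value, each consumes exactly sz bytes and their sz-byte
   prefixes satisfy the precondition, hence are equal; this is the
   injective_postcond required by injective. *)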
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sz: Prims.nat -> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"LowParse.Spec.Base.strong_parser_kind",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.parser_kind"
] | [] | false | false | false | true | false | let constant_size_parser_kind (sz: nat) : Tot parser_kind =
| strong_parser_kind sz sz None | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_constant_size_parser_precond' | val make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: GTot Type0 | val make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: GTot Type0 | let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 71,
"end_line": 54,
"start_col": 0,
"start_line": 48
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f:
(s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> Prims.GTot (FStar.Pervasives.Native.option t))
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.l_Forall",
"Prims.l_imp",
"LowParse.Spec.Combinators.make_constant_size_parser_precond_precond",
"Prims.l_or"
] | [] | false | false | false | false | true | let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: GTot Type0 =
| forall (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}). {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_make_constant_size_parser_aux | val tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot (option t)))
: Tot (tot_bare_parser t) | val tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot (option t)))
: Tot (tot_bare_parser t) | let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 5,
"end_line": 123,
"start_col": 0,
"start_line": 108
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> FStar.Pervasives.Native.option t)
-> LowParse.Spec.Base.tot_bare_parser t | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.op_LessThan",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"Prims.bool",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Seq.Base.slice",
"LowParse.Spec.Base.tot_bare_parser"
] | [] | false | false | false | false | false | let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot (option t)))
: Tot (tot_bare_parser t) =
| fun (s: bytes) ->
if Seq.length s < sz
then None
else
let s':bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let sz:consumed_length s = sz in
Some (v, sz) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_constant_size_parser | val make_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Pure (parser (constant_size_parser_kind sz) t)
(requires (make_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | val make_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Pure (parser (constant_size_parser_kind sz) t)
(requires (make_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 106,
"start_col": 0,
"start_line": 90
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f:
(s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz}
-> Prims.GTot (FStar.Pervasives.Native.option t))
-> Prims.Pure
(LowParse.Spec.Base.parser (LowParse.Spec.Combinators.constant_size_parser_kind sz) t) | Prims.Pure | [] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.constant_size_parser_kind",
"LowParse.Spec.Combinators.make_constant_size_parser_injective",
"LowParse.Spec.Base.bare_parser",
"LowParse.Spec.Combinators.make_constant_size_parser_aux",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.make_constant_size_parser_precond",
"Prims.l_True"
] | [] | false | false | false | false | false | let make_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot (option t)))
: Pure (parser (constant_size_parser_kind sz) t)
(requires (make_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) =
| let p:bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p | false |
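To illustrate how the combinator in the record above is meant to be instantiated, here is a minimal F* sketch; it is not part of any dataset record, and the name parse_unit0_sketch is hypothetical. The zero-byte case is chosen because make_constant_size_parser_precond is then trivially discharged: any two empty byte sequences are Seq.equal.

(* Degenerate constant-size parser: consumes zero bytes and returns (). *)
let parse_unit0_sketch : parser (constant_size_parser_kind 0) unit =
  make_constant_size_parser 0 unit (fun _ -> Some ())

Realistic instantiations (fixed-width integers, tags, and the like) supply a decode function on sz-byte slices together with an injectivity argument that establishes the same precondition.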
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.fail_parser' | val fail_parser' (t: Type) : Tot (tot_bare_parser t) | val fail_parser' (t: Type) : Tot (tot_bare_parser t) | let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 15,
"end_line": 230,
"start_col": 0,
"start_line": 227
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: Type -> LowParse.Spec.Base.tot_bare_parser t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.tot_bare_parser"
] | [] | false | false | false | true | false | let fail_parser' (t: Type) : Tot (tot_bare_parser t) =
| fun _ -> None | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_ret | val serialize_ret (#t: Type) (v: t) (v_unique: (v': t -> Lemma (v == v')))
: Tot (serializer (parse_ret v)) | val serialize_ret (#t: Type) (v: t) (v_unique: (v': t -> Lemma (v == v')))
: Tot (serializer (parse_ret v)) | let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 25,
"end_line": 212,
"start_col": 0,
"start_line": 204
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: t -> v_unique: (v': t -> FStar.Pervasives.Lemma (ensures v == v'))
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_ret v) | Prims.Tot | [
"total"
] | [] | [
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Base.mk_serializer",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.parse_ret",
"FStar.Seq.Base.empty",
"LowParse.Bytes.byte",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.serializer"
] | [] | false | false | false | false | false | let serialize_ret (#t: Type) (v: t) (v_unique: (v': t -> Lemma (v == v')))
: Tot (serializer (parse_ret v)) =
| mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_ret' | val parse_ret' (#t: Type) (v: t) : Tot (tot_bare_parser t) | val parse_ret' (#t: Type) (v: t) : Tot (tot_bare_parser t) | let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 54,
"end_line": 190,
"start_col": 0,
"start_line": 189
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: t -> LowParse.Spec.Base.tot_bare_parser t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.tot_bare_parser"
] | [] | false | false | false | true | false | let parse_ret' (#t: Type) (v: t) : Tot (tot_bare_parser t) =
| fun (b: bytes) -> Some (v, (0 <: consumed_length b)) | false |
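Since parse_ret carries no precondition, any value can be lifted into a parser that consumes no input. A small sketch follows; the name parse_forty_two_sketch is hypothetical and not taken from the module.

// Consumes zero bytes and always succeeds with the value 42.
let parse_forty_two_sketch : parser parse_ret_kind int = parse_ret 42

parse_empty in the quoted module is the unit-typed instance of the same idea.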
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_make_total_constant_size_parser | val tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot t))
: Pure (tot_parser (total_constant_size_parser_kind sz) t)
(requires (make_total_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | val tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot t))
: Pure (tot_parser (total_constant_size_parser_kind sz) t)
(requires (make_total_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 183,
"start_col": 0,
"start_line": 168
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | sz: Prims.nat -> t: Type -> f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> t)
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Base.total_constant_size_parser_kind sz) t) | Prims.Pure | [] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Base.total_constant_size_parser_kind",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Combinators.tot_make_constant_size_parser",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Combinators.make_total_constant_size_parser_precond",
"Prims.l_True"
] | [] | false | false | false | false | false | let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot t))
: Pure (tot_parser (total_constant_size_parser_kind sz) t)
(requires (make_total_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) =
| let p:tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p | false |
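tot_make_total_constant_size_parser is the usual entry point for fixed-width primitive parsers. Below is a sketch of the single-byte case using decode-by-indexing; the name parse_byte_sketch is hypothetical, and if SMT does not discharge the injectivity precondition directly, a small auxiliary lemma in the style of LowParse.Spec.Int would be needed.

// Reads exactly one byte; injectivity holds because equal heads of
// length-1 sequences force the sequences themselves to be equal.
let parse_byte_sketch : tot_parser (total_constant_size_parser_kind 1) U8.t =
  tot_make_total_constant_size_parser 1 U8.t (fun s -> Seq.index s 0)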
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_false_kind | val parse_false_kind : LowParse.Spec.Base.parser_kind | let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 75,
"end_line": 261,
"start_col": 0,
"start_line": 261
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.strong_parser_kind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.ParserKindMetadataFail"
] | [] | false | false | false | true | false | let parse_false_kind =
| strong_parser_kind 0 0 (Some ParserKindMetadataFail) | false |
|
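parse_false_kind pairs naturally with fail_parser: its metadata is ParserKindMetadataFail and its size bounds are 0..0, so fail_parser_kind_precond holds. A sketch of that pairing follows; the name parse_false_sketch is hypothetical.

// Always returns None; squash False has no inhabitants, so no value can ever be produced.
let parse_false_sketch : parser parse_false_kind (squash False) =
  fail_parser parse_false_kind (squash False)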
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_make_constant_size_parser | val tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot (option t)))
: Pure (tot_parser (constant_size_parser_kind sz) t)
(requires (make_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | val tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot (option t)))
: Pure (tot_parser (constant_size_parser_kind sz) t)
(requires (make_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 141,
"start_col": 0,
"start_line": 125
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> FStar.Pervasives.Native.option t)
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.constant_size_parser_kind sz) t) | Prims.Pure | [] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Pervasives.Native.option",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.constant_size_parser_kind",
"LowParse.Spec.Combinators.make_constant_size_parser_injective",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Combinators.tot_make_constant_size_parser_aux",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Combinators.make_constant_size_parser_precond",
"Prims.l_True"
] | [] | false | false | false | false | false | let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> Tot (option t)))
: Pure (tot_parser (constant_size_parser_kind sz) t)
(requires (make_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) =
| let p:tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.make_total_constant_size_parser | val make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot t))
: Pure (parser (total_constant_size_parser_kind sz) t)
(requires (make_total_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | val make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot t))
: Pure (parser (total_constant_size_parser_kind sz) t)
(requires (make_total_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) | let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 166,
"start_col": 0,
"start_line": 151
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
sz: Prims.nat ->
t: Type ->
f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> Prims.GTot t)
-> Prims.Pure
(LowParse.Spec.Base.parser (LowParse.Spec.Base.total_constant_size_parser_kind sz) t) | Prims.Pure | [] | [] | [
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Base.total_constant_size_parser_kind",
"LowParse.Spec.Base.bare_parser",
"LowParse.Spec.Combinators.make_constant_size_parser",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.make_total_constant_size_parser_precond",
"Prims.l_True"
] | [] | false | false | false | false | false | let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: (s: bytes{Seq.length s == sz} -> GTot t))
: Pure (parser (total_constant_size_parser_kind sz) t)
(requires (make_total_constant_size_parser_precond sz t f))
(ensures (fun _ -> True)) =
| let p:bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p | false |
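A minimal usage sketch for the make_total_constant_size_parser record above (the module and names below, ExampleConstantSize, decode_byte_sketch and parse_byte_sketch, are illustrative and not part of the LowParse API): instantiating the combinator at sz = 1 with a decoder that reads the single byte. The injectivity precondition should be within reach of the solver, since two one-byte slices with equal bytes at index 0 are extensionally equal, though a Seq.lemma_eq_intro hint may be needed under some solver settings.

module ExampleConstantSize
open LowParse.Spec.Combinators
module Seq = FStar.Seq
module U8 = FStar.UInt8

(* Total decoder: a one-byte slice always decodes to its single byte. *)
let decode_byte_sketch (s: bytes { Seq.length s == 1 }) : GTot U8.t =
  Seq.index s 0

(* Constant-size parser reading exactly one byte; its kind is
   total_constant_size_parser_kind 1 (low = high = 1, total metadata). *)
let parse_byte_sketch : parser (total_constant_size_parser_kind 1) U8.t =
  make_total_constant_size_parser 1 U8.t decode_byte_sketch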
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_parse_ret | val tot_parse_ret (#t: Type) (v: t) : Tot (tot_parser parse_ret_kind t) | val tot_parse_ret (#t: Type) (v: t) : Tot (tot_parser parse_ret_kind t) | let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 14,
"end_line": 199,
"start_col": 0,
"start_line": 197
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: t -> LowParse.Spec.Base.tot_parser LowParse.Spec.Combinators.parse_ret_kind t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.parse_ret'",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Base.tot_parser"
] | [] | false | false | false | true | false | let tot_parse_ret (#t: Type) (v: t) : Tot (tot_parser parse_ret_kind t) =
| parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v | false |
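A small sketch of how the tot_parse_ret record above is typically used (the names ExampleParseRet and parse_the_answer_sketch are illustrative only): the combinator injects a pure value into the parser monad, always succeeding and consuming zero bytes, which is exactly what parse_ret_kind's 0/0 size bounds and total metadata record.

module ExampleParseRet
open LowParse.Spec.Combinators

(* Ignores its input and returns 42 with a consumed length of 0. *)
let parse_the_answer_sketch : tot_parser parse_ret_kind int =
  tot_parse_ret 42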
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_empty | val serialize_empty:serializer parse_empty | val serialize_empty:serializer parse_empty | let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 77,
"end_line": 217,
"start_col": 0,
"start_line": 217
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.serializer LowParse.Spec.Combinators.parse_empty | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.serialize_ret",
"Prims.unit"
] | [] | false | false | false | true | false | let serialize_empty:serializer parse_empty =
| serialize_ret () (fun _ -> ()) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.fail_parser_kind_precond | val fail_parser_kind_precond (k: parser_kind) : GTot Type0 | val fail_parser_kind_precond (k: parser_kind) : GTot Type0 | let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 80,
"end_line": 225,
"start_col": 0,
"start_line": 221
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k: LowParse.Spec.Base.parser_kind -> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"Prims.l_and",
"Prims.b2t",
"Prims.op_disEquality",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_kind_metadata_some",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserKindMetadataTotal",
"Prims.l_imp",
"FStar.Pervasives.Native.uu___is_Some",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"Prims.op_LessThanOrEqual",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"FStar.Pervasives.Native.__proj__Some__item__v"
] | [] | false | false | false | false | true | let fail_parser_kind_precond (k: parser_kind) : GTot Type0 =
| k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) | false |
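A sketch that exercises the precondition from the fail_parser_kind_precond record above (ExampleFailParser and fail_u8_sketch are hypothetical names): parse_false_kind, defined earlier in this file, satisfies the precondition because its metadata is ParserKindMetadataFail and both size bounds are 0, so fail_parser can be instantiated with it at any result type. The original file verifies the analogous parse_false definition under --z3rlimit 16, so the same option may be needed here.

module ExampleFailParser
open LowParse.Spec.Combinators
module U8 = FStar.UInt8
#set-options "--z3rlimit 16"

(* A parser of U8.t values that rejects every input; the Pure precondition
   fail_parser_kind_precond parse_false_kind is discharged by the solver. *)
let fail_u8_sketch : parser parse_false_kind U8.t =
  fail_parser parse_false_kind U8.t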
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_cases_injective | val and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 | val and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 | let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 10,
"end_line": 308,
"start_col": 0,
"start_line": 301
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p': (_: t -> LowParse.Spec.Base.bare_parser t') -> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Spec.Base.bare_parser",
"Prims.l_Forall",
"LowParse.Bytes.bytes",
"Prims.l_imp",
"LowParse.Spec.Combinators.and_then_cases_injective_precond",
"Prims.eq2",
"LowParse.Spec.Base.parse"
] | [] | false | false | false | false | true | let and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 =
| forall (x1: t) (x2: t) (b1: bytes) (b2: bytes). {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 | false |
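An illustrative, non-normative companion to the and_then_cases_injective record above: a family of parsers that returns its own tag is cases-injective, since the parsed value alone determines which x produced it, whereas a constant family generally is not. The sketch below only sets up such a family (ExampleCasesInjective and tag_family_sketch are hypothetical names); a proof of the property for it would go through and_then_cases_injective_intro from the surrounding file context.

module ExampleCasesInjective
open LowParse.Spec.Combinators
module U8 = FStar.UInt8

(* For each tag x, a parser that yields x itself and consumes no input; two
   successful parses returning equal values can only come from equal tags. *)
let tag_family_sketch (x: U8.t) : Tot (bare_parser U8.t) =
  parse_ret x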
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_metadata | val and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t | val and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t | let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 442,
"start_col": 0,
"start_line": 435
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
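(* Note: `and_then_bare p p'` is the bare monadic bind: it first runs `p` on `b`,
   then runs the dependent parser `p' v` on the remaining bytes
   `Seq.slice b l (Seq.length b)`, and on success reports the sum of the two
   consumed lengths. Injectivity of the combined parser is only recovered under
   the `and_then_cases_injective` condition defined next. *)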
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
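(* Note: cases-injectivity says that two continuations `p' x1` and `p' x2` can
   only produce the same result value when `x1 == x2`. In practice this holds
   whenever the continuation tags its result with the first value (for instance
   a continuation of the shape `fun x -> ... parse_ret (x, y) ...`), and it is
   the key hypothesis behind `and_then_injective` below. *)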
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k1: LowParse.Spec.Base.parser_kind_metadata_t -> k2: LowParse.Spec.Base.parser_kind_metadata_t
-> LowParse.Spec.Base.parser_kind_metadata_t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind_metadata_t",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_kind_metadata_some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.None"
] | [] | false | false | false | true | false | let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t =
| match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.fail_serializer | val fail_serializer
(k: parser_kind{fail_parser_kind_precond k})
(t: Type)
(prf: (x: t -> Lemma False))
: Tot (serializer (fail_parser k t)) | val fail_serializer
(k: parser_kind{fail_parser_kind_precond k})
(t: Type)
(prf: (x: t -> Lemma False))
: Tot (serializer (fail_parser k t)) | let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 20,
"end_line": 258,
"start_col": 0,
"start_line": 250
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
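(* Note: `parse_empty` consumes no bytes and always returns `()`, while
   `serialize_empty` maps `()` to the empty byte sequence, so the two trivially
   round-trip. *)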
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
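(* Note: this precondition rules out kinds that claim totality (a parser that
   always fails cannot be total) and requires the declared low/high bounds to be
   consistent, so that the always-failing parser below can inhabit the kind. *)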
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
k: LowParse.Spec.Base.parser_kind{LowParse.Spec.Combinators.fail_parser_kind_precond k} ->
t: Type ->
prf: (x: t -> FStar.Pervasives.Lemma (ensures Prims.l_False))
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.fail_parser k t) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Combinators.fail_parser_kind_precond",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.l_False",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Base.mk_serializer",
"LowParse.Spec.Combinators.fail_parser",
"FStar.Pervasives.false_elim",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.serializer"
] | [] | false | false | false | false | false | let fail_serializer
(k: parser_kind{fail_parser_kind_precond k})
(t: Type)
(prf: (x: t -> Lemma False))
: Tot (serializer (fail_parser k t)) =
| mk_serializer (fail_parser k t)
(fun x ->
prf x;
false_elim ())
(fun x -> prf x) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_false | val parse_false:parser parse_false_kind (squash False) | val parse_false:parser parse_false_kind (squash False) | let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 74,
"end_line": 263,
"start_col": 0,
"start_line": 263
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
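(* Note: for a total `f` the precondition is just injectivity of `f` on
   `sz`-byte inputs; e.g. one would expect a single-byte reader such as
   `fun (s: bytes {Seq.length s == 1}) -> Seq.index s 0` to satisfy it, yielding
   a total constant-size parser of exactly one byte (this example is only a
   sketch, not part of the original file). *)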
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_false_kind (Prims.squash Prims.l_False) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.fail_parser",
"LowParse.Spec.Combinators.parse_false_kind",
"Prims.squash",
"Prims.l_False"
] | [] | false | false | false | true | false | let parse_false:parser parse_false_kind (squash False) =
| fail_parser _ _ | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_cases_injective_precond | val and_then_cases_injective_precond
(#t #t': Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0 | val and_then_cases_injective_precond
(#t #t': Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0 | let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 299,
"start_col": 0,
"start_line": 287
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
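(* Note: `serialize_ret` serializes every value to the empty byte sequence,
   which is consistent because `parse_ret v` consumes nothing; the `v_unique`
   argument effectively restricts `t` to the single value `v`, making the
   correctness proof immediate. *)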
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
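(* Note: since `squash False` has no inhabitants, `serialize_false` can never
   actually be applied, so eliminating its absurd input with `false_elim` is
   enough; `parse_false` itself always fails, matching the
   `ParserKindMetadataFail` metadata of its kind. *)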
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p': (_: t -> LowParse.Spec.Base.bare_parser t') ->
x1: t ->
x2: t ->
b1: LowParse.Bytes.bytes ->
b2: LowParse.Bytes.bytes
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Spec.Base.bare_parser",
"LowParse.Bytes.bytes",
"Prims.l_and",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"Prims.eq2",
"Prims.logical",
"FStar.Pervasives.Native.option"
] | [] | false | false | false | false | true | let and_then_cases_injective_precond
(#t #t': Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0 =
| Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\
(let Some (v1, _) = parse (p' x1) b1 in
let Some (v2, _) = parse (p' x2) b2 in
v1 == v2) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_injective | val synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 | val synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 | let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 71,
"end_line": 569,
"start_col": 0,
"start_line": 564
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
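(* Note: the combined kind adds the lower bounds, adds the upper bounds only when
   both are known, and combines metadata and subkind conservatively; e.g.
   sequencing two constant-size kinds of 2 and 4 bytes would be expected to give
   `parser_kind_low = 6` and `parser_kind_high = Some 6`. *)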
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
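(* A typical use of `and_then` (illustrative sketch only, not part of the
   original file) pairs two parsers by tagging the result with the first value,
   e.g. something of the shape
   `p `and_then` (fun x -> q `and_then` (fun y -> parse_ret (x, y)))`;
   each continuation's argument reappears in the resulting pair, so the
   `and_then_cases_injective` requirement is immediate. *)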
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t1 -> Prims.GTot t2) -> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.l_Forall",
"Prims.l_imp",
"Prims.eq2"
] | [] | false | false | false | false | true | let synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 =
| forall (x: t1) (x': t1). {:pattern (f x); (f x')} f x == f x' ==> x == x' | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.fail_parser | val fail_parser (k: parser_kind) (t: Type)
: Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) | val fail_parser (k: parser_kind) (t: Type)
: Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) | let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 21,
"end_line": 248,
"start_col": 0,
"start_line": 242
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
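(* Note: this auxiliary parser inspects exactly the first `sz` bytes (failing if
   fewer are available) and reports `sz` as the consumed length on success;
   injectivity, and hence a valid parser kind, is only obtained under the
   precondition established further below. *)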
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k: LowParse.Spec.Base.parser_kind -> t: Type -> Prims.Pure (LowParse.Spec.Base.parser k t) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Combinators.tot_fail_parser",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.fail_parser_kind_precond",
"Prims.l_True"
] | [] | false | false | false | false | false | let fail_parser (k: parser_kind) (t: Type)
: Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) =
| tot_fail_parser k t | false |
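Illustrative aside (not part of the dataset rows): fail_parser only requires a kind whose metadata is not ParserKindMetadataTotal and whose low bound does not exceed its high bound. A minimal usage sketch, mirroring the parse_false definition that appears further down in this same file context; the name parse_never_unit is hypothetical and assumes LowParse.Spec.Combinators is open.

(* Hypothetical: a parser for unit that rejects every input, using an
   explicitly failing strong kind so fail_parser's precondition holds. *)
let parse_never_unit : parser (strong_parser_kind 0 0 (Some ParserKindMetadataFail)) unit =
  fail_parser _ _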
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_false | val serialize_false:serializer parse_false | val serialize_false:serializer parse_false | let serialize_false : serializer parse_false = fun input -> false_elim () | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 73,
"end_line": 265,
"start_col": 0,
"start_line": 265
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.serializer LowParse.Spec.Combinators.parse_false | Prims.Tot | [
"total"
] | [] | [
"Prims.squash",
"Prims.l_False",
"FStar.Pervasives.false_elim",
"LowParse.Bytes.bytes"
] | [] | false | false | false | true | false | let serialize_false:serializer parse_false =
| fun input -> false_elim () | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_bare | val and_then_bare (#t #t': Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t')))
: Tot (bare_parser t') | val and_then_bare (#t #t': Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t')))
: Tot (bare_parser t') | let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 18,
"end_line": 285,
"start_col": 0,
"start_line": 269
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.bare_parser t -> p': (_: t -> LowParse.Spec.Base.bare_parser t')
-> LowParse.Spec.Base.bare_parser t' | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.bare_parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.option",
"FStar.Seq.Base.slice",
"LowParse.Bytes.byte",
"FStar.Seq.Base.length"
] | [] | false | false | false | true | false | let and_then_bare (#t #t': Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t')))
: Tot (bare_parser t') =
| fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
let p'v = p' v in
let s':bytes = Seq.slice b l (Seq.length b) in
(match parse p'v s' with
| Some (v', l') ->
let res:consumed_length b = l + l' in
Some (v', res)
| None -> None)
| None -> None | false |
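Illustrative aside (not part of the dataset rows): and_then_bare is the bare monadic bind, so the continuation runs on the suffix left over by the first parser and the two consumed lengths are added. A minimal sketch under the assumption that LowParse.Spec.Combinators is open and that byte and bytes from LowParse.Bytes are in scope, as they are in this file; all three names below are hypothetical.

(* Hypothetical: a bare parser that consumes exactly one byte. *)
let parse_one_byte : bare_parser byte =
  fun (b: bytes) ->
    if Seq.length b < 1 then None
    else Some (Seq.index b 0, (1 <: consumed_length b))

(* Hypothetical continuation: parse a second byte and pair it with the first. *)
let parse_second_byte (x: byte) : Tot (bare_parser (byte * byte)) =
  fun (b: bytes) ->
    match parse_one_byte b with
    | None -> None
    | Some (y, n) -> Some ((x, y), n)

(* Sequencing the two: on an input of length at least 2 this consumes
   exactly 1 + 1 bytes and returns the first two bytes as a pair. *)
let parse_byte_pair : bare_parser (byte * byte) =
  and_then_bare parse_one_byte parse_second_byte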
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_fail_parser | val tot_fail_parser (k: parser_kind) (t: Type)
: Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) | val tot_fail_parser (k: parser_kind) (t: Type)
: Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) | let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 20,
"end_line": 240,
"start_col": 0,
"start_line": 232
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k: LowParse.Spec.Base.parser_kind -> t: Type -> Prims.Pure (LowParse.Spec.Base.tot_parser k t) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_strengthen",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Combinators.fail_parser'",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Combinators.fail_parser_kind_precond",
"Prims.l_True"
] | [] | false | false | false | false | false | let tot_fail_parser (k: parser_kind) (t: Type)
: Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) =
| let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_serialize_synth | val bare_serialize_synth
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
: Tot (bare_serializer t2) | val bare_serialize_synth
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
: Tot (bare_serializer t2) | let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 26,
"end_line": 676,
"start_col": 0,
"start_line": 667
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1)
-> LowParse.Spec.Base.bare_serializer t2 | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.bare_serializer"
] | [] | false | false | false | false | false | let bare_serialize_synth
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
: Tot (bare_serializer t2) =
| fun (x: t2) -> s1 (g1 x) | false |
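Illustrative aside (not part of the dataset rows): bare_serialize_synth maps a value of the rewritten type back through g1 and reuses the underlying serializer; at this point no correctness refinement is attached, it is only a bare_serializer. A minimal sketch assuming LowParse.Spec.Combinators is open; the name bare_serialize_with_unit is hypothetical.

(* Hypothetical: pairing with unit is a trivially invertible rewrite, so the
   bare serializer below simply forgets the unit component and calls s1. *)
let bare_serialize_with_unit
  (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (s1: serializer p1)
: Tot (bare_serializer (t1 * unit))
= bare_serialize_synth p1 (fun (x: t1) -> (x, ())) s1 (fun (y: t1 * unit) -> fst y)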
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_cases_injective_intro | val and_then_cases_injective_intro
(#t #t': Type)
(p': (t -> Tot (bare_parser t')))
(lem:
(x1: t -> x2: t -> b1: bytes -> b2: bytes
-> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))))
: Lemma (and_then_cases_injective p') | val and_then_cases_injective_intro
(#t #t': Type)
(p': (t -> Tot (bare_parser t')))
(lem:
(x1: t -> x2: t -> b1: bytes -> b2: bytes
-> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))))
: Lemma (and_then_cases_injective p') | let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 108,
"end_line": 325,
"start_col": 0,
"start_line": 310
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p': (_: t -> LowParse.Spec.Base.bare_parser t') ->
lem:
(x1: t -> x2: t -> b1: LowParse.Bytes.bytes -> b2: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires LowParse.Spec.Combinators.and_then_cases_injective_precond p' x1 x2 b1 b2)
(ensures x1 == x2))
-> FStar.Pervasives.Lemma (ensures LowParse.Spec.Combinators.and_then_cases_injective p') | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.bare_parser",
"LowParse.Bytes.bytes",
"Prims.unit",
"LowParse.Spec.Combinators.and_then_cases_injective_precond",
"Prims.squash",
"Prims.eq2",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Classical.forall_intro_3",
"Prims.l_Forall",
"Prims.l_imp",
"FStar.Classical.forall_intro",
"FStar.Classical.move_requires",
"Prims.l_True",
"LowParse.Spec.Combinators.and_then_cases_injective"
] | [] | false | false | true | false | false | let and_then_cases_injective_intro
(#t #t': Type)
(p': (t -> Tot (bare_parser t')))
(lem:
(x1: t -> x2: t -> b1: bytes -> b2: bytes
-> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))))
: Lemma (and_then_cases_injective p') =
| Classical.forall_intro_3 (fun x1 x2 b1 ->
Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) | false |
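The record above provides the introduction form for and_then_cases_injective. In client proofs it is applied to a family of case parsers p' together with a per-case argument showing that equal parse results force equal tags. The following sketch only illustrates that call shape, with the per-case argument abstracted as a parameter; the name and_then_cases_injective_from_recover_tag is invented and the snippet is untested.

let and_then_cases_injective_from_recover_tag
  (#t: Type)
  (#t': Type)
  (p': (t -> Tot (bare_parser t')))
  (recover_tag: (
    (x1: t) ->
    (x2: t) ->
    (b1: bytes) ->
    (b2: bytes) ->
    Lemma
    (requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
    (ensures (x1 == x2))
  ))
: Lemma (and_then_cases_injective p')
= and_then_cases_injective_intro p' recover_tag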
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_inverse | val synth_inverse (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 | val synth_inverse (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 | let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 59,
"end_line": 699,
"start_col": 0,
"start_line": 693
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f2: (_: t1 -> Prims.GTot t2) -> g1: (_: t2 -> Prims.GTot t1) -> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"Prims.l_Forall",
"Prims.eq2"
] | [] | false | false | false | false | true | let synth_inverse (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 =
| (forall (x: t2). {:pattern (f2 (g1 x))} f2 (g1 x) == x) | false |
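synth_inverse f2 g1, completed above, states that f2 is a left inverse of g1: f2 (g1 x) == x for every x of the target type. Here is a small self-contained sketch with a concrete inverse pair; the names add_one, sub_one and synth_inverse_add_sub are invented, and while the remaining obligation is just (x - 1) + 1 == x, the snippet has not been machine-checked.

let add_one (x: int) : GTot int = x + 1
let sub_one (x: int) : GTot int = x - 1

(* add_one (sub_one x) == (x - 1) + 1 == x, so the pair satisfies synth_inverse. *)
let synth_inverse_add_sub () : Lemma (synth_inverse add_one sub_one) = ()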
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_injective_intro' | val synth_injective_intro'
(#t1 #t2: Type)
(f: (t1 -> GTot t2))
(prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x'))))
: Lemma (synth_injective f) | val synth_injective_intro'
(#t1 #t2: Type)
(f: (t1 -> GTot t2))
(prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x'))))
: Lemma (synth_injective f) | let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 69,
"end_line": 593,
"start_col": 0,
"start_line": 580
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
f: (_: t1 -> Prims.GTot t2) ->
prf: (x: t1 -> x': t1 -> FStar.Pervasives.Lemma (requires f x == f x') (ensures x == x'))
-> FStar.Pervasives.Lemma (ensures LowParse.Spec.Combinators.synth_injective f) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.unit",
"Prims.eq2",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Classical.forall_intro_2",
"Prims.l_imp",
"FStar.Classical.move_requires",
"Prims.l_True",
"LowParse.Spec.Combinators.synth_injective"
] | [] | false | false | true | false | false | let synth_injective_intro'
(#t1 #t2: Type)
(f: (t1 -> GTot t2))
(prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x'))))
: Lemma (synth_injective f) =
| Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) | false |
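The primed introduction lemma completed above turns a per-pair requires/ensures argument into synth_injective f. A self-contained sketch of a typical invocation follows; incr and synth_injective_incr are invented names, the inner obligation incr x == incr x' ==> x == x' is expected to follow by linear arithmetic, and the snippet is untested.

let incr (x: int) : GTot int = x + 1

(* The unit-returning lambda is checked against the expected Lemma type,
   leaving incr x == incr x' ==> x == x' to the SMT solver. *)
let synth_injective_incr () : Lemma (synth_injective incr)
= synth_injective_intro' incr (fun x x' -> ())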
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_injective | val and_then_injective (#t #t': Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t')))
: Lemma
(requires (injective p /\ (forall (x: t). injective (p' x)) /\ and_then_cases_injective p'))
(ensures (injective (and_then_bare p p'))) | val and_then_injective (#t #t': Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t')))
: Lemma
(requires (injective p /\ (forall (x: t). injective (p' x)) /\ and_then_cases_injective p'))
(ensures (injective (and_then_bare p p'))) | let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 67,
"end_line": 367,
"start_col": 0,
"start_line": 327
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
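(* Usage sketch (hypothetical names, not defined in LowParse): and_then_bare
runs `p`, feeds the parsed value to the continuation, runs the resulting
parser on the remaining bytes, and adds up the consumed lengths. A
tag-dependent payload could be written as
let parse_msg_bare = and_then_bare parse_tag (fun tag -> parse_payload tag)
where `parse_tag` and `parse_payload` are assumed to exist elsewhere. *)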
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
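(* Example (informal): the continuation `fun (x: t) -> parse_ret x` is
cases-injective, since equal parse results force equal arguments. A constant
continuation such as `fun (_: t) -> parse_ret 0` is not, as soon as `t` has
two distinct inhabitants: both arguments produce the same result even though
the arguments differ. *)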
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.bare_parser t -> p': (_: t -> LowParse.Spec.Base.bare_parser t')
-> FStar.Pervasives.Lemma
(requires
LowParse.Spec.Base.injective p /\ (forall (x: t). LowParse.Spec.Base.injective (p' x)) /\
LowParse.Spec.Combinators.and_then_cases_injective p')
(ensures LowParse.Spec.Base.injective (LowParse.Spec.Combinators.and_then_bare p p')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.bare_parser",
"FStar.Classical.forall_intro_2",
"LowParse.Bytes.bytes",
"Prims.l_imp",
"LowParse.Spec.Base.injective_precond",
"LowParse.Spec.Base.injective_postcond",
"FStar.Classical.move_requires",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Base.consumed_length",
"Prims._assert",
"FStar.Seq.Properties.lemma_split",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"Prims.op_Addition",
"Prims.eq2",
"Prims.nat",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.injective",
"LowParse.Spec.Combinators.and_then_cases_injective_precond",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Seq.Base.length",
"LowParse.Spec.Combinators.and_then_bare",
"Prims.l_and",
"Prims.l_Forall",
"LowParse.Spec.Combinators.and_then_cases_injective"
] | [] | false | false | true | false | false | let and_then_injective (#t #t': Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t')))
: Lemma
(requires (injective p /\ (forall (x: t). injective (p' x)) /\ and_then_cases_injective p'))
(ensures (injective (and_then_bare p p'))) =
| let ps = and_then_bare p p' in
let f (b1 b2: bytes)
: Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) =
let Some (v1, len1) = p b1 in
let Some (v2, len2) = p b2 in
let b1':bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2':bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let Some (_, len1') = (p' v1) b1' in
let Some (_, len2') = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_kind | val and_then_kind (k1 k2: parser_kind) : Tot parser_kind | val and_then_kind (k1 k2: parser_kind) : Tot parser_kind | let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
} | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 468,
"start_col": 0,
"start_line": 446
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
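(* Instantiation sketch (illustrative only; LowParse's real byte parsers live
in other modules): with sz = 1 and f = (fun s -> Seq.index s 0), this
precondition holds because two length-1 byte sequences with the same first
byte are equal, so
make_total_constant_size_parser 1 U8.t (fun s -> Seq.index s 0)
below would yield a total one-byte parser of kind
total_constant_size_parser_kind 1. *)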
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
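(* Proof-pattern note: to establish and_then_cases_injective for a concrete
continuation p', it is usually enough to call and_then_cases_injective_intro
with a pointwise lemma deriving x1 == x2 from the precondition; the
Classical.forall_intro* / move_requires calls only repackage that pointwise
argument into the quantified, pattern-guarded form stated above. *)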
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
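(* Worked cases (informal): failure on either side is absorbing, and totality
must hold on both sides. For instance,
and_then_metadata (Some ParserKindMetadataFail) (Some ParserKindMetadataTotal) == Some ParserKindMetadataFail
and_then_metadata (Some ParserKindMetadataTotal) None == None
and_then_metadata (Some ParserKindMetadataTotal) (Some ParserKindMetadataTotal) == Some ParserKindMetadataTotal *)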
// unfold | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k1: LowParse.Spec.Base.parser_kind -> k2: LowParse.Spec.Base.parser_kind
-> LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.Mkparser_kind'",
"Prims.op_Addition",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low",
"LowParse.Spec.Base.bool_and",
"LowParse.Spec.Base.is_some",
"Prims.nat",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.some_v",
"Prims.bool",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.option",
"Prims.op_Equality",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"LowParse.Spec.Base.ParserConsumesAll",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Spec.Combinators.and_then_metadata",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata"
] | [] | false | false | false | true | false | let and_then_kind (k1 k2: parser_kind) : Tot parser_kind =
| {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high
=
if (is_some k1.parser_kind_high) `bool_and` (is_some k2.parser_kind_high)
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind
=
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else
if
(k1.parser_kind_subkind = Some ParserStrong)
`bool_and`
(k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else
if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
} | false |
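(* Worked example (informal): sequencing a 1-byte strong parser with a 4-byte
strong parser. With k1 = strong_parser_kind 1 1 None and
k2 = strong_parser_kind 4 4 None, and_then_kind k1 k2 computes
parser_kind_low = 5, parser_kind_high = Some 5, parser_kind_metadata = None,
parser_kind_subkind = Some ParserStrong, i.e. exactly the kind of a 5-byte
strong parser. *)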
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_no_lookahead | val and_then_no_lookahead
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(#k': parser_kind)
(#t': Type)
(p': (t -> Tot (parser k' t')))
: Lemma (requires (and_then_cases_injective p'))
(ensures
((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==>
no_lookahead (and_then_bare p p'))) | val and_then_no_lookahead
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(#k': parser_kind)
(#t': Type)
(p': (t -> Tot (parser k' t')))
: Lemma (requires (and_then_cases_injective p'))
(ensures
((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==>
no_lookahead (and_then_bare p p'))) | let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else () | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 9,
"end_line": 486,
"start_col": 0,
"start_line": 470
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.parser k t -> p': (_: t -> LowParse.Spec.Base.parser k' t')
-> FStar.Pervasives.Lemma (requires LowParse.Spec.Combinators.and_then_cases_injective p')
(ensures
Mkparser_kind'?.parser_kind_subkind k ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong /\
Mkparser_kind'?.parser_kind_subkind k' ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong ==>
LowParse.Spec.Base.no_lookahead (LowParse.Spec.Combinators.and_then_bare p p')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Classical.forall_intro_2",
"LowParse.Bytes.bytes",
"Prims.l_imp",
"Prims.l_and",
"LowParse.Spec.Base.no_lookahead",
"LowParse.Spec.Base.injective",
"Prims.l_Forall",
"LowParse.Spec.Base.no_lookahead_on",
"LowParse.Spec.Combinators.and_then_bare",
"FStar.Classical.move_requires",
"LowParse.Spec.Combinators.and_then_no_lookahead_on",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"Prims.bool",
"FStar.Classical.forall_intro",
"Prims.l_iff",
"LowParse.Spec.Base.parser_kind_prop",
"LowParse.Spec.Base.parser_kind_prop'",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.and_then_cases_injective",
"Prims.eq2"
] | [] | false | false | true | false | false | let and_then_no_lookahead
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(#k': parser_kind)
(#t': Type)
(p': (t -> Tot (parser k' t')))
: Lemma (requires (and_then_cases_injective p'))
(ensures
((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==>
no_lookahead (and_then_bare p p'))) =
| parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong
then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_no_lookahead_on | val and_then_no_lookahead_on
(#t #t': Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x x': bytes)
: Lemma (requires (no_lookahead p /\ injective p /\ (forall (x: t). no_lookahead (p' x))))
(ensures (no_lookahead_on (and_then_bare p p') x x')) | val and_then_no_lookahead_on
(#t #t': Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x x': bytes)
: Lemma (requires (no_lookahead p /\ injective p /\ (forall (x: t). no_lookahead (p' x))))
(ensures (no_lookahead_on (and_then_bare p p') x x')) | let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> () | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 432,
"start_col": 0,
"start_line": 369
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p: LowParse.Spec.Base.bare_parser t ->
p': (_: t -> LowParse.Spec.Base.bare_parser t') ->
x: LowParse.Bytes.bytes ->
x': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
LowParse.Spec.Base.no_lookahead p /\ LowParse.Spec.Base.injective p /\
(forall (x: t). LowParse.Spec.Base.no_lookahead (p' x)))
(ensures
LowParse.Spec.Base.no_lookahead_on (LowParse.Spec.Combinators.and_then_bare p p') x x') | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.bare_parser",
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Classical.move_requires",
"Prims.unit",
"Prims.eq2",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.slice",
"Prims.l_and",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"Prims.logical",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"Prims._assert",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.no_lookahead_on",
"Prims.op_Addition",
"Prims.int",
"Prims.nat",
"LowParse.Spec.Base.injective_precond",
"Prims.bool",
"LowParse.Spec.Combinators.and_then_bare",
"LowParse.Spec.Base.no_lookahead",
"LowParse.Spec.Base.injective",
"Prims.l_Forall"
] | [] | false | false | true | false | false | let and_then_no_lookahead_on
(#t #t': Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x x': bytes)
: Lemma (requires (no_lookahead p /\ injective p /\ (forall (x: t). no_lookahead (p' x))))
(ensures (no_lookahead_on (and_then_bare p p') x x')) =
| let f = and_then_bare p p' in
match f x with
| Some v ->
let y, off = v in
let off:nat = off in
let off_x:consumed_length x = off in
if off <= Seq.length x'
then
let off_x':consumed_length x' = off in
let g ()
: Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures
(Some? (f x') /\
(let Some v' = f x' in
let y', off' = v' in
y == y'))) =
assert (Some? (p x));
let Some (y1, off1) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let Some v1' = p x' in
let y1', off1' = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2:bytes = Seq.slice x off1 (Seq.length x) in
let x2':bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let Some (y2, off2) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let Some v2' = p2 x2' in
let y2', _ = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
| _ -> () | false |
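(* Proof-pattern note (informal): the auxiliary lemma `g` handles the
interesting case under the hypothesis that `x` and `x'` agree on the consumed
prefix, and Classical.move_requires g () turns that guarded lemma into an
implication in the SMT context. The same move_requires idiom discharges the
injectivity and no-lookahead obligations elsewhere in this file. *)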
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_fret' | val parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t') | val parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t') | let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 56,
"end_line": 557,
"start_col": 0,
"start_line": 556
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t -> Prims.GTot t') -> v: t -> LowParse.Spec.Base.bare_parser t' | Prims.Tot | [
"total"
] | [] | [
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.bare_parser"
] | [] | false | false | false | false | false | let parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t') =
| fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_fret | val parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t') | val parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t') | let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 17,
"end_line": 562,
"start_col": 0,
"start_line": 560
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t -> Prims.GTot t') -> v: t
-> LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_ret_kind t' | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Combinators.parse_fret'",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Base.parser"
] | [] | false | false | false | false | false | let parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t') =
| [@@ inline_let ]let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v | false |
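The two preceding records define parse_fret' and parse_fret, the ghost-function analogue of parse_ret: the resulting parser returns f v and consumes no input. The following is a minimal usage sketch, not part of the dataset or of LowParse itself; the module name Example.ParseFret and the names ghost_succ / parse_succ_of are illustrative, and the sketch assumes the LowParse.Spec modules quoted above are available on the load path.

module Example.ParseFret
open LowParse.Spec.Base
open LowParse.Spec.Combinators

(* A ghost computation over the supplied value; nothing is read from the input bytes. *)
let ghost_succ (x: nat) : GTot nat = x + 1

(* parse_fret packages ghost_succ applied to x as a parser of kind parse_ret_kind,
   i.e. a parser that always succeeds and consumes zero bytes. *)
let parse_succ_of (x: nat) : Tot (parser parse_ret_kind nat) =
  parse_fret ghost_succ x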
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_synth_eq2 | val parse_synth_eq2
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) | val parse_synth_eq2
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) | let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 24,
"end_line": 639,
"start_col": 0,
"start_line": 629
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
sq: Prims.squash (LowParse.Spec.Combinators.synth_injective f2) ->
b: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.parse_synth p1 f2) b ==
LowParse.Spec.Combinators.parse_synth' p1 f2 b) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"Prims.squash",
"LowParse.Spec.Combinators.synth_injective",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_synth_eq",
"Prims.unit",
"Prims.l_True",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.parse_synth'",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let parse_synth_eq2
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) =
| parse_synth_eq p1 f2 b | false |
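parse_synth_eq2 in the record above is the squash-hypothesis variant of parse_synth_eq: it rewrites parse (parse_synth p1 f2) b into the bare parse_synth' p1 f2 b. As a hedged sketch of how parse_synth is typically applied (not taken from the dataset; the type wrapped, the constructor Wrap and the surrounding names are hypothetical, and the sketch relies on the SMT solver discharging the synth_injective side condition from constructor injectivity, which is the usual pattern):

module Example.ParseSynth
open LowParse.Spec.Base
open LowParse.Spec.Combinators

(* A single-constructor wrapper around nat; Wrap is injective by construction. *)
type wrapped = | Wrap : contents: nat -> wrapped

let synth_wrap (x: nat) : GTot wrapped = Wrap x

(* Re-tag any parser for nat as a parser for wrapped without changing its kind;
   the synth_injective precondition of parse_synth follows from injectivity of Wrap. *)
let parse_wrapped (#k: parser_kind) (p: parser k nat) : Tot (parser k wrapped) =
  parse_synth p synth_wrap

With parse_wrapped in scope, a proof about it would typically invoke parse_synth_eq p synth_wrap b (or parse_synth_eq2 p synth_wrap () b) to expose the underlying parse of p.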
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_synth' | val parse_synth' (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2))
: Tot (bare_parser t2) | val parse_synth' (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2))
: Tot (bare_parser t2) | let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 49,
"end_line": 604,
"start_col": 0,
"start_line": 595
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// return for the parser monad composed with a ghost function: parse_fret' f v consumes no input and yields f v
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.parser k t1 -> f2: (_: t1 -> Prims.GTot t2)
-> LowParse.Spec.Base.bare_parser t2 | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.bare_parser"
] | [] | false | false | false | false | false | let parse_synth' (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2))
: Tot (bare_parser t2) =
| fun b ->
match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed) | false |
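A brief usage sketch for the parse_synth' row above (added for illustration; bool_to_nat and parse_bool_as_nat are hypothetical names, everything else comes from definitions shown in the file_context):

// Hypothetical helper: embed bool into nat (trivially injective).
let bool_to_nat (b: bool) : GTot nat = if b then 1 else 0

// parse_synth' only post-processes the result of an existing parser with f2;
// it returns a bare_parser, so injectivity of f2 is not enforced here —
// that side condition is what the full parse_synth combinator adds on top.
let parse_bool_as_nat (p: parser parse_ret_kind bool) : Tot (bare_parser nat) =
  parse_synth' p bool_to_nat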
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_tagged_union_data | val synth_tagged_union_data
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t | val synth_tagged_union_data
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t | let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 902,
"start_col": 0,
"start_line": 895
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// return for the parser monad composed with a ghost function: parse_fret' f v consumes no input and yields f v
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
tg: tag_t ->
x: LowParse.Spec.Base.refine_with_tag tag_of_data tg
-> data_t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.refine_with_tag"
] | [] | false | false | false | false | false | let synth_tagged_union_data
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t =
| x | false |
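A brief usage sketch for the synth_tagged_union_data row above (illustrative only; tag_of_opt and forget_tag are hypothetical names, while refine_with_tag is provided by LowParse.Spec.Base, which the file_context includes):

// Hypothetical tag function: an option value is tagged by whether it is Some.
let tag_of_opt (x: option nat) : GTot bool = Some? x

// synth_tagged_union_data simply forgets the tag refinement on the data:
// a value of type refine_with_tag tag_of_opt tg is returned at type option nat.
let forget_tag (tg: bool) (x: refine_with_tag tag_of_opt tg) : Tot (option nat) =
  synth_tagged_union_data tag_of_opt tg x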
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_parse_synth_eq | val tot_parse_synth_eq
(#k: parser_kind)
(#t1 #t2: Type)
(p1: tot_parser k t1)
(f2: (t1 -> Tot t2))
(b: bytes)
: Lemma (requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) | val tot_parse_synth_eq
(#k: parser_kind)
(#t1 #t2: Type)
(p1: tot_parser k t1)
(f2: (t1 -> Tot t2))
(b: bytes)
: Lemma (requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) | let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 27,
"end_line": 665,
"start_col": 0,
"start_line": 655
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// return for the parser monad composed with a ghost function: parse_fret' f v consumes no input and yields f v
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.tot_parser k t1 -> f2: (_: t1 -> t2) -> b: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma (requires LowParse.Spec.Combinators.synth_injective f2)
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.tot_parse_synth p1 f2) b ==
LowParse.Spec.Combinators.parse_synth' p1 f2 b) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_synth_eq",
"Prims.unit",
"LowParse.Spec.Combinators.synth_injective",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Combinators.tot_parse_synth",
"LowParse.Spec.Combinators.parse_synth'",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let tot_parse_synth_eq
(#k: parser_kind)
(#t1 #t2: Type)
(p1: tot_parser k t1)
(f2: (t1 -> Tot t2))
(b: bytes)
: Lemma (requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) =
| parse_synth_eq #k p1 f2 b | false |
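A minimal usage sketch for the lemma completed above, assuming only the LowParse.Spec.Combinators interface quoted in this record; the module name Example.SynthUse, the type wrapped, and synth_wrapped are illustrative and not part of the library:

module Example.SynthUse
open LowParse.Spec.Combinators
module U8 = FStar.UInt8

(* A single-constructor wrapper; constructor injectivity is what discharges
   the synth_injective precondition of tot_parse_synth. *)
type wrapped = | Wrap of U8.t

let synth_wrapped (x: U8.t) : Tot wrapped = Wrap x

let tot_parse_wrapped (#k: parser_kind) (p: tot_parser k U8.t)
  : Tot (tot_parser k wrapped)
= tot_parse_synth p synth_wrapped

(* tot_parse_synth_eq exposes the underlying parse_synth' computation on any
   input, which is how callers unfold a synthesized total parser. *)
let parse_wrapped_unfold (#k: parser_kind) (p: tot_parser k U8.t) (b: bytes)
  : Lemma (parse (tot_parse_wrapped p) b == parse_synth' #k p synth_wrapped b)
= tot_parse_synth_eq p synth_wrapped b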
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.and_then_correct | val and_then_correct
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(#k': parser_kind)
(#t': Type)
(p': (t -> Tot (parser k' t')))
: Lemma (requires (and_then_cases_injective p'))
(ensures
(injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p'))) | val and_then_correct
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(#k': parser_kind)
(#t': Type)
(p': (t -> Tot (parser k' t')))
: Lemma (requires (and_then_cases_injective p'))
(ensures
(injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p'))) | let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p' | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 509,
"start_col": 0,
"start_line": 490
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 8,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 64,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Spec.Base.parser k t -> p': (_: t -> LowParse.Spec.Base.parser k' t')
-> FStar.Pervasives.Lemma (requires LowParse.Spec.Combinators.and_then_cases_injective p')
(ensures
LowParse.Spec.Base.injective (LowParse.Spec.Combinators.and_then_bare p p') /\
LowParse.Spec.Base.parser_kind_prop (LowParse.Spec.Combinators.and_then_kind k k')
(LowParse.Spec.Combinators.and_then_bare p p')) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.and_then_no_lookahead",
"Prims.unit",
"LowParse.Spec.Combinators.and_then_injective",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.and_then_bare",
"FStar.Classical.forall_intro",
"Prims.l_iff",
"LowParse.Spec.Base.parser_kind_prop",
"LowParse.Spec.Base.parser_kind_prop'",
"LowParse.Spec.Combinators.and_then_cases_injective",
"Prims.squash",
"Prims.l_and",
"LowParse.Spec.Base.injective",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let and_then_correct
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(#k': parser_kind)
(#t': Type)
(p': (t -> Tot (parser k' t')))
: Lemma (requires (and_then_cases_injective p'))
(ensures
(injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p'))) =
| parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p' | false |
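A sketch of how a correctness lemma of this shape is consumed: after calling and_then_correct, the bare composite and_then_bare p p' can be given the parser type promised by the val and_then declared in the interface. The name and_then_impl is illustrative; the library's own implementation lives in the corresponding .fst and may be organized differently:

module Example.AndThenImpl
open LowParse.Spec.Combinators

let and_then_impl
  (#k: parser_kind) (#t: Type) (p: parser k t)
  (#k': parser_kind) (#t': Type) (p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
  (requires (and_then_cases_injective p'))
  (ensures (fun _ -> True))
= (* the lemma puts injectivity and the combined kind property in scope *)
  and_then_correct p p';
  (* so the bare composite now checks against the refined parser type *)
  and_then_bare p p'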
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_inverse_intro' | val synth_inverse_intro'
(#t1 #t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2 -> Lemma (f2 (g1 x) == x)))
: Lemma (ensures (synth_inverse f2 g1)) | val synth_inverse_intro'
(#t1 #t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2 -> Lemma (f2 (g1 x) == x)))
: Lemma (ensures (synth_inverse f2 g1)) | let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 719,
"start_col": 0,
"start_line": 711
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
f2: (_: t1 -> Prims.GTot t2) ->
g1: (_: t2 -> Prims.GTot t1) ->
prf: (x: t2 -> FStar.Pervasives.Lemma (ensures f2 (g1 x) == x))
-> FStar.Pervasives.Lemma (ensures LowParse.Spec.Combinators.synth_inverse f2 g1) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Classical.forall_intro",
"LowParse.Spec.Combinators.synth_inverse"
] | [] | false | false | true | false | false | let synth_inverse_intro'
(#t1 #t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2 -> Lemma (f2 (g1 x) == x)))
: Lemma (ensures (synth_inverse f2 g1)) =
| Classical.forall_intro prf | false |
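A pointwise round-trip proof, which is the situation synth_inverse_intro' targets; the module name Example.SynthInverse and the names boxed, box, unbox are illustrative only:

module Example.SynthInverse
open LowParse.Spec.Combinators
module U8 = FStar.UInt8

type boxed = | Box of U8.t

let box (x: U8.t) : GTot boxed = Box x
let unbox (b: boxed) : GTot U8.t = let Box x = b in x

(* Each boxed value round-trips through unbox; supplying that pointwise fact
   is exactly the proof obligation synth_inverse_intro' asks for. *)
let box_unbox_inverse () : Lemma (synth_inverse box unbox)
= synth_inverse_intro' box unbox (fun b -> ())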
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_inverse_synth_injective_pat | val synth_inverse_synth_injective_pat (#t1 #t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1))
: Lemma (requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)] | val synth_inverse_synth_injective_pat (#t1 #t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1))
: Lemma (requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)] | let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 62,
"end_line": 730,
"start_col": 0,
"start_line": 721
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t1 -> Prims.GTot t2) -> g: (_: t2 -> Prims.GTot t1)
-> FStar.Pervasives.Lemma (requires LowParse.Spec.Combinators.synth_inverse g f)
(ensures LowParse.Spec.Combinators.synth_injective f)
[SMTPat (LowParse.Spec.Combinators.synth_inverse g f)] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims._assert",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.eq2",
"Prims.unit",
"LowParse.Spec.Combinators.synth_inverse",
"Prims.squash",
"LowParse.Spec.Combinators.synth_injective",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | false | false | true | false | false | let synth_inverse_synth_injective_pat (#t1 #t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1))
: Lemma (requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)] =
| assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2)) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_serialize_tagged_union | val bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Tot (bare_serializer data_t) | val bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Tot (bare_serializer data_t) | let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 41,
"end_line": 1065,
"start_col": 0,
"start_line": 1052
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
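(* Illustrative sketch, not part of the original file: [example_parse_zero_byte]
   is a hypothetical name. [parse_ret] consumes no input and always succeeds,
   returning the given value with zero bytes consumed; for instance: *)
let example_parse_zero_byte : parser parse_ret_kind U8.t = parse_ret 0uy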
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
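(* Illustrative sketch, not part of the original file: [example_pair_bare] is a
   hypothetical name. [and_then_bare] is the bind of the parser monad: it runs
   [p], feeds the resulting value into the choice of the second parser, and adds
   up the two consumed lengths. Pairing two bare parsers is a direct application
   (the checked [and_then] below additionally tracks kinds and injectivity): *)
let example_pair_bare (#t #t': Type) (p: bare_parser t) (p': bare_parser t')
  : Tot (bare_parser (t & t'))
= and_then_bare p (fun x -> and_then_bare p' (fun y -> parse_ret' (x, y)))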
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
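(* Illustrative sketch, not part of the original file: [example_tag_of_either] is
   a hypothetical name. For an [either]-shaped payload, the tag of a value is just
   which constructor was used; [parse_tagged_union pt example_tag_of_either p]
   would then first run [pt] to read such a tag and dispatch on it to parse the
   matching payload, exactly as spelled out by [parse_tagged_union_eq] above.
   Completing the example would also require payload parsers returning values
   refined with [refine_with_tag example_tag_of_either]. *)
let example_tag_of_either (#a #b: Type) (x: either a b) : GTot bool = Inl? x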
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
st: LowParse.Spec.Base.serializer pt ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
s: (t: tag_t -> LowParse.Spec.Base.serializer (p t))
-> LowParse.Spec.Base.bare_serializer data_t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.refine_with_tag",
"FStar.Seq.Base.append",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.bare_serializer"
] | [] | false | false | false | false | false | let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Tot (bare_serializer data_t) =
| fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_injective_synth_inverse_synth_inverse_recip | val synth_injective_synth_inverse_synth_inverse_recip
(#t1 #t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g)) | val synth_injective_synth_inverse_synth_inverse_recip
(#t1 #t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g)) | let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 40,
"end_line": 758,
"start_col": 0,
"start_line": 751
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
g: (_: t2 -> Prims.GTot t1) ->
f: (_: t1 -> Prims.GTot t2) ->
u464:
Prims.squash (LowParse.Spec.Combinators.synth_inverse g f /\
LowParse.Spec.Combinators.synth_injective g)
-> Prims.squash (LowParse.Spec.Combinators.synth_inverse f g) | Prims.Tot | [
"total"
] | [] | [
"Prims.squash",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"Prims._assert",
"Prims.l_Forall",
"Prims.eq2"
] | [] | false | false | true | false | false | let synth_injective_synth_inverse_synth_inverse_recip
(#t1 #t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g)) =
| assert (forall x. g (f (g x)) == g x) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_serialize_dtuple2 | val bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
(xy: dtuple2 t1 t2)
: GTot bytes | val bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
(xy: dtuple2 t1 t2)
: GTot bytes | let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 72,
"end_line": 1294,
"start_col": 0,
"start_line": 1283
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: (x: t1 -> LowParse.Spec.Base.serializer (p2 x)) ->
xy: Prims.dtuple2 t1 t2
-> Prims.GTot LowParse.Bytes.bytes | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.dtuple2",
"FStar.Seq.Base.append",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.dfst",
"FStar.Pervasives.dsnd",
"LowParse.Bytes.bytes"
] | [] | false | false | false | false | false | let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
(xy: dtuple2 t1 t2)
: GTot bytes =
| (serialize s1 (dfst xy)) `Seq.append` (serialize (s2 (dfst xy)) (dsnd xy)) | false |
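A minimal sketch, not part of the record above and assuming LowParse.Spec.Combinators is open (the lemma name below is hypothetical): bare_serialize_dtuple2 is a plain non-recursive definition, so its unfolding — the tag's bytes followed by the payload's bytes — should be available to the SMT solver, and the restatement below is expected to discharge trivially.

let bare_serialize_dtuple2_unfold
  (#k1: parser_kind)
  (#t1: Type)
  (#p1: parser k1 t1)
  (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
  (#k2: parser_kind)
  (#t2: (t1 -> Tot Type))
  (#p2: (x: t1) -> parser k2 (t2 x))
  (s2: (x: t1) -> serializer (p2 x))
  (xy: dtuple2 t1 t2)
: Lemma
  (bare_serialize_dtuple2 s1 s2 xy ==
    serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= (* the goal is literally the body of bare_serialize_dtuple2, so no auxiliary lemma is needed *)
  ()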
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_synth_eq' | val serialize_synth_eq'
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma (ensures (y1 == y2)) | val serialize_synth_eq'
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma (ensures (y1 == y2)) | let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 36,
"end_line": 809,
"start_col": 0,
"start_line": 790
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u523:
u533:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 } ->
x: t2 ->
y1: LowParse.Bytes.bytes ->
q1:
Prims.squash (y1 ==
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u523)
x) ->
y2: LowParse.Bytes.bytes ->
q2: Prims.squash (y2 == LowParse.Spec.Base.serialize s1 (g1 x))
-> FStar.Pervasives.Lemma (ensures y1 == y2) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"LowParse.Bytes.bytes",
"Prims.squash",
"Prims.eq2",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.serialize_synth",
"LowParse.Spec.Combinators.serialize_synth_eq",
"Prims.l_True",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let serialize_synth_eq'
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma (ensures (y1 == y2)) =
| serialize_synth_eq p1 f2 s1 g1 u x | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_serialize_nondep_then | val bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2)) | val bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2)) | let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 1369,
"start_col": 0,
"start_line": 1357
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
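(* Illustrative usage sketch (hypothetical names): a total constant-size
   parser for a single byte. The decoder returns the unique byte of its
   input, so the injectivity precondition reduces to extensional equality
   of two length-1 sequences; this is expected to be discharged by SMT via
   Seq.lemma_eq_intro, but the definition is a sketch, not a verified part
   of the interface. *)
let decode_byte_sketch (s: bytes { Seq.length s == 1 }) : GTot U8.t = Seq.index s 0
let parse_byte_sketch : parser (total_constant_size_parser_kind 1) U8.t =
  make_total_constant_size_parser 1 U8.t decode_byte_sketch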
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
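(* Usage sketch (hypothetical name): parse_ret consumes no input and always
   succeeds with the given value. *)
let parse_the_answer_sketch : parser parse_ret_kind int = parse_ret 42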
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
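(* Usage sketch (hypothetical names, not part of the original interface):
   parse_synth post-processes a parser's result with an injective function,
   typically a single-constructor wrapper as below; the synth_injective
   precondition follows from constructor injectivity. *)
type example_box = | ExampleBox : contents: unit -> example_box
let synth_example_box (x: unit) : GTot example_box = ExampleBox x
let parse_example_box_sketch : parser parse_ret_kind example_box =
  parse_synth parse_empty synth_example_box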
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
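(* Usage sketch (hypothetical names), continuing the wrapper pattern above:
   serializing through a synth reuses the underlying serializer on the
   result of the inverse function. The unit argument carries the
   inverse/injectivity obligations, which here follow from the
   single-constructor shape of the wrapper (possibly needing some inversion
   fuel); this is a sketch, not a verified addition. *)
type unit_box = | UnitBox : contents: unit -> unit_box
let box_unit (x: unit) : GTot unit_box = UnitBox x
let unbox_unit (y: unit_box) : GTot unit = let UnitBox u = y in u
let serialize_unit_box_sketch : serializer (parse_synth parse_empty box_unit) =
  serialize_synth parse_empty box_unit serialize_empty unbox_unit ()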
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
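(* Usage sketch (hypothetical names): a degenerate tagged union over bool in
   which the data is its own tag, so each case parser simply re-emits the
   tag it is given; realistic uses supply genuinely tag-dependent payload
   parsers. The coercion of the tag into refine_with_tag is expected to be
   discharged by unfolding, but this is a sketch only. *)
let tag_of_bool_sketch (b: bool) : GTot bool = b
let parse_bool_case_sketch (t: bool)
  : parser parse_ret_kind (refine_with_tag tag_of_bool_sketch t)
= parse_ret #(refine_with_tag tag_of_bool_sketch t) t
let parse_bool_union_sketch (#kt: parser_kind) (pt: parser kt bool)
  : parser (and_then_kind kt parse_ret_kind) bool
= parse_tagged_union pt tag_of_bool_sketch parse_bool_case_sketch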
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
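(* Usage sketch (hypothetical name): a degenerate dependent pair whose
   second component's type does not actually depend on the first; realistic
   uses let t2 vary with the parsed first component (e.g. a tag-dependent
   payload). *)
let parse_unit_dtuple2_sketch
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (dtuple2 unit (fun _ -> unit))
= parse_dtuple2 parse_empty (fun _ -> parse_empty)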
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
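(* Usage sketch (hypothetical name): nondep_then pairs two independent
   parsers; here the trivial empty parser is paired with itself, giving a
   parser for unit * unit that consumes no input. *)
let parse_two_units_sketch
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (unit * unit)
= nondep_then parse_empty parse_empty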
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k1 t1 ->
s1: LowParse.Spec.Base.serializer p1 ->
p2: LowParse.Spec.Base.parser k2 t2 ->
s2: LowParse.Spec.Base.serializer p2
-> LowParse.Spec.Base.bare_serializer (t1 * t2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Base.append",
"LowParse.Bytes.byte",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.bare_serializer"
] | [] | false | false | false | false | false | let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2)) =
| fun (x: t1 * t2) ->
let x1, x2 = x in
Seq.append (s1 x1) (s2 x2) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_tagged_union_payload | val parse_tagged_union_payload
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(tg: tag_t)
: Tot (parser k data_t) | val parse_tagged_union_payload
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(tg: tag_t)
: Tot (parser k data_t) | let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 98,
"end_line": 912,
"start_col": 0,
"start_line": 904
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
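(* Serializer-side sketch for serialize_synth, continuing the celsius example above
   (illustration only). serialize_byte is assumed here; in practice serialize_u8 from
   LowParse.Spec.Int plays this role. The unit-refinement argument carries the two side
   conditions: synth_inverse holds because projecting the byte back out of Celsius x
   returns x, and synth_injective holds by constructor injectivity. *)
assume val serialize_byte : serializer parse_byte

let synth_celsius_recip (c: celsius) : Tot U8.t = let Celsius x = c in x

let serialize_celsius_sketch : serializer parse_celsius_sketch =
  serialize_synth parse_byte synth_celsius serialize_byte synth_celsius_recip ()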
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
tg: tag_t
-> LowParse.Spec.Base.parser k data_t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.synth_tagged_union_data"
] | [] | false | false | false | false | false | let parse_tagged_union_payload
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(tg: tag_t)
: Tot (parser k data_t) =
| parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) | false |
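(* Payload-parser sketch for one fixed tag (illustration only), showing the shape of the
   per-tag parsers that parse_tagged_union_payload consumes. Reuses the assumed
   parse_byte from the sketches above. refine_with_tag tag_of_msg true is the subtype of
   msg carrying the tag true, i.e. exactly the Data case here, and the synth function is
   injective because Data is a constructor. *)
type msg = | Ping : msg | Data : U8.t -> msg

let tag_of_msg (m: msg) : GTot bool = Data? m

let synth_data (x: U8.t) : Tot (refine_with_tag tag_of_msg true) = Data x

let parse_data_payload_sketch
  : parser (total_constant_size_parser_kind 1) (refine_with_tag tag_of_msg true)
= parse_synth parse_byte synth_data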
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_parse_tagged_union | val bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t -> Tot parser_kind))
(p: (t: tag_t -> Tot (parser (k' t) (refine_with_tag tag_of_data t))))
(input: bytes)
: GTot (option (data_t * consumed_length input)) | val bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t -> Tot parser_kind))
(p: (t: tag_t -> Tot (parser (k' t) (refine_with_tag tag_of_data t))))
(input: bytes)
: GTot (option (data_t * consumed_length input)) | let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 7,
"end_line": 974,
"start_col": 0,
"start_line": 957
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
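(* Usage sketch (illustration only): a 1-byte parser that accepts exactly 0x00 or 0x01
   and decodes it as a bool. The make_constant_size_parser_precond side condition is the
   injectivity of the decoding on its Some-domain; it should follow by case analysis on
   the single byte, possibly with a small auxiliary lemma to establish Seq.equal. *)
let parse_bool_byte_sketch : parser (constant_size_parser_kind 1) bool =
  make_constant_size_parser 1 bool
    (fun (s: bytes { Seq.length s == 1 } ) ->
      match U8.v (Seq.index s 0) with
      | 0 -> Some false
      | 1 -> Some true
      | _ -> None)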
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
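(* Usage sketch (illustration only): the identity decoding of a single byte, i.e. the
   obvious total 1-byte parser. The precondition asks that equal decoded bytes come from
   equal 1-byte inputs, which the extensionality underlying Seq.equal should provide. *)
let parse_single_byte_sketch : parser (total_constant_size_parser_kind 1) U8.t =
  make_total_constant_size_parser 1 U8.t (fun (s: bytes { Seq.length s == 1 } ) -> Seq.index s 0)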
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
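(* Sketch (illustration only): parse_ret consumes no input and returns a fixed value,
   e.g. a default for a field that is absent from the wire format. Note that
   serialize_ret only applies when every value of the type is provably equal to the
   returned one, as for unit in serialize_empty above. *)
let parse_default_flag_sketch : parser parse_ret_kind bool = parse_ret false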
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
pt: LowParse.Spec.Base.parser kt tag_t ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
k': (t: tag_t -> LowParse.Spec.Base.parser_kind) ->
p:
(t: tag_t
-> LowParse.Spec.Base.parser (k' t) (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
input: LowParse.Bytes.bytes
-> Prims.GTot (FStar.Pervasives.Native.option (data_t * LowParse.Spec.Base.consumed_length input)) | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.option",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length"
] | [] | false | false | false | false | false | let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t -> Tot parser_kind))
(p: (t: tag_t -> Tot (parser (k' t) (refine_with_tag tag_of_data t))))
(input: bytes)
: GTot (option (data_t * consumed_length input)) =
| match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None | false |
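(* End-to-end sketch for parse_tagged_union (illustration only). Everything assumed here
   is hypothetical: parse_tag_bool stands for some 1-byte boolean tag parser and
   parse_payload_byte for a byte parser such as parse_u8 from LowParse.Spec.Int. Both
   payload cases read one byte, so the per-tag parsers share a single kind and no
   weakening is needed; the side conditions are constructor injectivity, expected to be
   SMT-dischargeable. *)
type color = | Red : U8.t -> color | Blue : U8.t -> color

let tag_of_color (c: color) : GTot bool = Blue? c

assume val parse_tag_bool : parser (total_constant_size_parser_kind 1) bool
assume val parse_payload_byte : parser (total_constant_size_parser_kind 1) U8.t

let parse_color_cases (b: bool)
  : Tot (parser (total_constant_size_parser_kind 1) (refine_with_tag tag_of_color b))
= if b
  then parse_synth parse_payload_byte (fun (x: U8.t) -> (Blue x <: refine_with_tag tag_of_color b))
  else parse_synth parse_payload_byte (fun (x: U8.t) -> (Red x <: refine_with_tag tag_of_color b))

let parse_color_sketch
  : parser (and_then_kind (total_constant_size_parser_kind 1) (total_constant_size_parser_kind 1)) color
= parse_tagged_union parse_tag_bool tag_of_color parse_color_cases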
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_strengthen' | val serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1{p2 input})
: GTot bytes | val serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1{p2 input})
: GTot bytes | let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 19,
"end_line": 1692,
"start_col": 0,
"start_line": 1683
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
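(* Editor's sketch (not part of the original interface): a degenerate
   instantiation of parse_dtuple2 / serialize_dtuple2 with the trivial unit
   parser on both sides, shown only to illustrate the shape of the API
   (parse_dtuple2 is itself built from parse_tagged_union and parse_synth
   above). The names parse_unit_dpair and serialize_unit_dpair are
   hypothetical. *)

let parse_unit_dpair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (dtuple2 unit (fun _ -> unit))
= parse_dtuple2 parse_empty (fun _ -> parse_empty)

let serialize_unit_dpair : serializer parse_unit_dpair
= serialize_dtuple2 serialize_empty (fun _ -> serialize_empty)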
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
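(* Editor's sketch (not part of the original interface): pairing the trivial
   unit parser with itself via nondep_then, and serializing the pair with
   serialize_nondep_then. The names parse_unit_pair and serialize_unit_pair
   are hypothetical; parse_ret_kind has subkind ParserStrong, which is what
   serialize_nondep_then requires of the left component. *)

let parse_unit_pair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (unit * unit)
= parse_empty `nondep_then` parse_empty

let serialize_unit_pair : serializer parse_unit_pair
= serialize_empty `serialize_nondep_then` serialize_empty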
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p2: (_: t1 -> Prims.GTot Type0) ->
prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2 ->
s: LowParse.Spec.Base.serializer p1 ->
input: t1{p2 input}
-> Prims.GTot LowParse.Bytes.bytes | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_strengthen_prf",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.serialize",
"LowParse.Bytes.bytes"
] | [] | false | false | false | false | false | let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1{p2 input})
: GTot bytes =
| serialize s input | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_parse_tagged_union_payload | val tot_parse_tagged_union_payload
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures
(fun y -> forall x. parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x)) | val tot_parse_tagged_union_payload
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures
(fun y -> forall x. parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x)) | let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 102,
"end_line": 1015,
"start_col": 0,
"start_line": 1003
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
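(* Editor's sketch (not part of the original file): a single byte can be
   parsed with make_total_constant_size_parser, since reading the unique
   byte of a length-1 slice is an injective decoding, which is exactly the
   precondition. The name parse_byte is hypothetical; the actual integer
   parsers of LowParse live in LowParse.Spec.Int. *)

let parse_byte : parser (total_constant_size_parser_kind 1) U8.t =
  make_total_constant_size_parser 1 U8.t (fun s -> Seq.index s 0)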
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
tag_of_data: (_: data_t -> tag_t) ->
p:
(t: tag_t
-> LowParse.Spec.Base.tot_parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
tg: tag_t
-> Prims.Pure (LowParse.Spec.Base.tot_parser k data_t) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.tot_parse_synth",
"LowParse.Spec.Combinators.synth_tagged_union_data",
"Prims.l_True",
"Prims.l_Forall",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Combinators.parse_tagged_union_payload"
] | [] | false | false | false | false | false | let tot_parse_tagged_union_payload
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures
(fun y -> forall x. parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x)) =
| tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective | val parse_tagged_union_payload_and_then_cases_injective
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
: Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) | val parse_tagged_union_payload_and_then_cases_injective
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
: Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) | let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 925,
"start_col": 0,
"start_line": 914
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
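(* Editor's note (not part of the original module): a quick illustration of
   the return combinators above. [parse_ret v] consumes nothing and succeeds
   on any input, i.e. for every [b: bytes]

     parse (parse_ret 42) b == Some (42, 0)

   and [serialize_ret] / [serialize_empty] accordingly emit the empty byte
   sequence. Unchecked documentation sketch. *)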
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
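(* Editor's note (illustrative sketch, not in the original source):
   [and_then_cases_injective p'] states that a successfully parsed value
   determines which case of the family produced it. A family that embeds
   its index into the result, e.g.

     fun (t: U8.t) -> parse_synth p (fun (x: payload) -> (t, x))

   (with [p] and [payload] hypothetical) is cases-injective, since distinct
   tags can never yield equal pairs; a family whose cases all return the
   same constant value is not. Unchecked comment, for documentation only. *)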
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
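(* Editor's note (worked example, not part of the original module):
   sequencing a 2-byte strong parser with a 4-byte strong parser gives

     and_then_kind (strong_parser_kind 2 2 None) (strong_parser_kind 4 4 None)

   whose parser_kind_low is 6, parser_kind_high is Some 6, metadata is None
   and subkind is Some ParserStrong, i.e. the sequence consumes exactly
   6 bytes and is still strong. Unchecked sketch. *)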
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
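(* Editor's note (illustrative, not in the original source): a typical
   [synth_injective] witness is a constructor-like wrapper, e.g.

     type wrapped = | Wrap : v: U8.t -> wrapped
     let synth_wrapped (x: U8.t) : Tot wrapped = Wrap x

   for which [synth_injective synth_wrapped] holds because [Wrap] is
   injective. A constant function such as [fun (_: U8.t) -> 0] is not.
   The names [wrapped] / [synth_wrapped] are editor-invented; unchecked. *)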
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
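(* Editor's note (usage sketch, not part of the original module): with the
   hypothetical wrapper from the note above, a low-level byte parser can be
   re-packaged as

     let parse_wrapped = parse_synth parse_u8 synth_wrapped

   where [parse_u8] is the standard byte parser from LowParse.Spec.Int
   (not defined in this file). [parse_synth_eq] is then the lemma one uses
   to unfold [parse_wrapped] in proofs. Unchecked sketch. *)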
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
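(* Editor's note (illustrative, not in the original source): continuing the
   hypothetical wrapper example, the recip function

     let unwrap (w: wrapped) : Tot U8.t = match w with | Wrap v -> v

   satisfies both [synth_inverse synth_wrapped unwrap] and
   [synth_inverse unwrap synth_wrapped]; together with injectivity this is
   exactly the precondition of [serialize_synth] below. Unchecked sketch;
   [unwrap] is an editor-invented name. *)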
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t))
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Combinators.and_then_cases_injective (LowParse.Spec.Combinators.parse_tagged_union_payload
tag_of_data
p)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.and_then_cases_injective_intro",
"LowParse.Spec.Combinators.parse_tagged_union_payload",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_synth_eq",
"LowParse.Spec.Combinators.synth_tagged_union_data",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"LowParse.Spec.Combinators.and_then_cases_injective",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let parse_tagged_union_payload_and_then_cases_injective
(#tag_t #data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
: Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) =
| and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p)
(fun x1 x2 b1 b2 ->
parse_synth_eq #k
#(refine_with_tag tag_of_data x1)
(p x1)
(synth_tagged_union_data tag_of_data x1)
b1;
parse_synth_eq #k
#(refine_with_tag tag_of_data x2)
(p x2)
(synth_tagged_union_data tag_of_data x2)
b2) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.compose | val compose (#t1 #t2 #t3: Type) (f1: (t1 -> GTot t2)) (f2: (t2 -> GTot t3)) (x: t1) : GTot t3 | val compose (#t1 #t2 #t3: Type) (f1: (t1 -> GTot t2)) (f2: (t2 -> GTot t3)) (x: t1) : GTot t3 | let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 =
let y1 = f1 x in
f2 y1 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 7,
"end_line": 1720,
"start_col": 0,
"start_line": 1718
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
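(* Editor's note (illustrative, not part of the original module): the
   archetypal use of [parse_tagged_union] is a sum type dispatched on a
   leading tag, e.g.

     type msg = | Ping : U8.t -> msg | Pong : U8.t -> msg
     let tag_of_msg (m: msg) : GTot U8.t =
       match m with | Ping _ -> 0uy | Pong _ -> 1uy

   with [parse_tagged_union parse_u8 tag_of_msg p], where [p tag] parses the
   payload refined by [tag_of_msg]. [msg], [tag_of_msg] and the per-tag
   payload parsers are hypothetical, and [parse_u8] comes from
   LowParse.Spec.Int; unchecked sketch. *)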
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
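(* Editor's note (illustrative, not in the original source): [parse_dtuple2]
   covers dependent pairs, the classic instance being a length-prefixed
   payload: a first parser reads a length [len: U8.t], and the second parser
   (e.g. a fixed-size bytes parser of [U8.v len] bytes, weakened to one
   common kind [k2] for all lengths) reads a payload whose type depends on
   [len]. The concrete bytes parsers live in other LowParse modules; this is
   an unchecked documentation sketch. *)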
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
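(* A companion sketch to parse_twice above (serialize_twice is our name): the
   matching serializer, available when the underlying kind is strong, as
   serialize_nondep_then requires for its first argument. *)
let serialize_twice
  (#k: parser_kind)
  (#t: Type)
  (#p: parser k t)
  (s: serializer p { k.parser_kind_subkind == Some ParserStrong } )
: Tot (serializer (nondep_then p p))
= serialize_nondep_then s s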
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf
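(* A minimal sketch of parse_strengthen in use (parse_strengthen_trivial is our
   name): strengthening with a property whose proof obligation is discharged
   trivially; an informative property would instead be established by prf. *)
let parse_strengthen_trivial
  (#k: parser_kind)
  (#t1: Type)
  (p1: parser k t1)
: Tot (parser k (x: t1 { True } ))
= parse_strengthen p1 (fun _ -> True) (fun _ _ _ -> ())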
let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input
let serialize_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: Lemma
(let output = serialize_strengthen' p2 prf s input in
parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output))
= ()
let serialize_strengthen
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
: Tot (serializer (parse_strengthen p1 p2 prf))
= Classical.forall_intro (serialize_strengthen_correct p2 prf s);
serialize_strengthen' p2 prf s | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f1: (_: t1 -> Prims.GTot t2) -> f2: (_: t2 -> Prims.GTot t3) -> x: t1 -> Prims.GTot t3 | Prims.GTot | [
"sometrivial"
] | [] | [] | [] | false | false | false | false | false | let compose (#t1 #t2 #t3: Type) (f1: (t1 -> GTot t2)) (f2: (t2 -> GTot t3)) (x: t1) : GTot t3 =
| let y1 = f1 x in
f2 y1 | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.lift_parser' | val lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t) | val lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t) | let lift_parser'
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (bare_parser t)
= fun (input: bytes) -> parse (f ()) input | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 42,
"end_line": 1747,
"start_col": 0,
"start_line": 1742
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf
let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input
let serialize_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: Lemma
(let output = serialize_strengthen' p2 prf s input in
parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output))
= ()
let serialize_strengthen
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
: Tot (serializer (parse_strengthen p1 p2 prf))
= Classical.forall_intro (serialize_strengthen_correct p2 prf s);
serialize_strengthen' p2 prf s
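(* Composition of ghost functions: apply [f1], then [f2]. Used below to state
   the lemma about post-composing a total constant-size parser with an
   injective function [g2]. *)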
let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 =
let y1 = f1 x in
f2 y1
val make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
(** Tot vs. Ghost *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: Prims.unit -> Prims.GTot (LowParse.Spec.Base.parser k t)) -> LowParse.Spec.Base.bare_parser t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"Prims.unit",
"LowParse.Spec.Base.parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.bare_parser"
] | [] | false | false | false | false | false | let lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t) =
| fun (input: bytes) -> parse (f ()) input | false |
OWGCounter.fst | OWGCounter.rewrite_perm | val rewrite_perm (#a: Type) (#v: G.erased a) (r: ghost_ref a) (p1 p2: P.perm)
: Steel unit
(ghost_pts_to r p1 v)
(fun _ -> ghost_pts_to r p2 v)
(fun _ -> p1 == p2)
(fun _ _ _ -> True) | val rewrite_perm (#a: Type) (#v: G.erased a) (r: ghost_ref a) (p1 p2: P.perm)
: Steel unit
(ghost_pts_to r p1 v)
(fun _ -> ghost_pts_to r p2 v)
(fun _ -> p1 == p2)
(fun _ _ _ -> True) | let rewrite_perm(#a:Type) (#v:G.erased a) (r:ghost_ref a) (p1 p2:P.perm)
: Steel unit
(ghost_pts_to r p1 v)
(fun _ -> ghost_pts_to r p2 v)
(fun _ -> p1 == p2)
(fun _ _ _ -> True)
= rewrite_slprop (ghost_pts_to r p1 v)
(ghost_pts_to r p2 v)
(fun _ -> ()) | {
"file_name": "share/steel/examples/steel/OWGCounter.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 31,
"end_line": 196,
"start_col": 0,
"start_line": 188
} | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
(*
* An implementation of the parallel counter presented by Owicki and Gries
* "Verifying properties of parallel programs: An axiomatic approach.", CACM'76
*
* In this example, the main thread forks two worker threads that both
* increment a shared counter. The goal of the example is to show that
* after both the worker threads are done, the value of the counter is
* its original value + 2.
*
* See http://pm.inf.ethz.ch/publications/getpdf.php for an implementation
* of the OWG counters in the Chalice framework.
*)
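(*
 * The key idea, made precise by [lock_inv_pred] below: the concrete counter
 * always holds the sum of the two ghost counters, and each worker keeps half
 * permission on its own ghost counter, so only that worker can advance it.
 *)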
module OWGCounter
module G = FStar.Ghost
open Steel.Memory
open Steel.FractionalPermission
open Steel.Reference
open Steel.SpinLock
open Steel.Effect.Atomic
open Steel.Effect
module R = Steel.Reference
module P = Steel.FractionalPermission
module A = Steel.Effect.Atomic
#set-options "--ide_id_info_off --using_facts_from '* -FStar.Tactics -FStar.Reflection' --fuel 0 --ifuel 0"
let half_perm = half_perm full_perm
(* Some basic wrappers to avoid issues with normalization.
TODO: The frame inference tactic should not normalize fst and snd*)
noextract
let fst = fst
noextract
let snd = snd
/// The core invariant of the Owicki-Gries counter, shared by the two parties.
/// The concrete counter [r] is shared, and the full permission is stored in the invariant.
/// Each party also has half permission to their own ghost counter [r1] or [r2], ensuring that
/// only they can modify it, by retrieving the other half of the permission when accessing the invariant.
/// The `__reduce__` attribute tells the frame inference tactic to unfold this predicate, for frame inference only.
[@@ __reduce__]
let lock_inv_slprop (r:ref int) (r1 r2:ghost_ref int) (w:int & int) =
ghost_pts_to r1 half_perm (fst w) `star`
ghost_pts_to r2 half_perm (snd w) `star`
pts_to r full_perm (fst w + snd w)
[@@ __reduce__]
let lock_inv_pred (r:ref int) (r1 r2:ghost_ref int) =
fun (x:int & int) -> lock_inv_slprop r r1 r2 x
/// The actual invariant, existentially quantifying over the values currently stored in the two ghost references
[@@ __reduce__]
let lock_inv (r:ref int) (r1 r2:ghost_ref int) : vprop =
h_exists (lock_inv_pred r r1 r2)
#push-options "--warn_error -271 --fuel 1 --ifuel 1"
/// A helper lemma to reason about the lock invariant
let lock_inv_equiv_lemma (r:ref int) (r1 r2:ghost_ref int)
: Lemma (lock_inv r r1 r2 `equiv` lock_inv r r2 r1)
=
let aux (r:ref int) (r1 r2:ghost_ref int) (m:mem)
: Lemma
(requires interp (hp_of (lock_inv r r1 r2)) m)
(ensures interp (hp_of (lock_inv r r2 r1)) m)
[SMTPat ()]
= assert (
Steel.Memory.h_exists #(int & int) (fun x -> hp_of (lock_inv_pred r r1 r2 x)) ==
h_exists_sl #(int & int) (lock_inv_pred r r1 r2))
by (FStar.Tactics.norm [delta_only [`%h_exists_sl]]);
let w : G.erased (int & int) = id_elim_exists (fun x -> hp_of (lock_inv_pred r r1 r2 x)) m in
assert ((ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w))) `equiv`
(ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)))) by (FStar.Tactics.norm [delta_attr [`%__steel_reduce__]]; canon' false (`true_p) (`true_p));
reveal_equiv
(ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)))
(ghost_pts_to r2 half_perm (fst (snd w, fst w)) `star`
ghost_pts_to r1 half_perm (snd (snd w, fst w)) `star`
pts_to r full_perm (fst (snd w, fst w) + snd (snd w, fst w)));
assert (interp (hp_of (lock_inv_pred r r2 r1 (snd w, fst w))) m);
intro_h_exists (snd w, fst w) (fun x -> hp_of (lock_inv_pred r r2 r1 x)) m;
assert (interp (Steel.Memory.h_exists (fun x -> hp_of (lock_inv_pred r r2 r1 x))) m);
assert (
Steel.Memory.h_exists #(int & int) (fun x -> hp_of (lock_inv_pred r r2 r1 x)) ==
h_exists_sl #(int & int) (lock_inv_pred r r2 r1))
by (FStar.Tactics.norm [delta_only [`%h_exists_sl]])
in
reveal_equiv (lock_inv r r1 r2) (lock_inv r r2 r1)
#pop-options
/// Acquiring the shared lock invariant
inline_for_extraction noextract
let og_acquire (r:ref int) (r_mine r_other:ghost_ref int) (b:G.erased bool)
(l:lock (lock_inv r (if b then r_mine else r_other)
(if b then r_other else r_mine)))
: SteelT unit
emp
(fun _ -> lock_inv r r_mine r_other)
= acquire l;
if b then begin
rewrite_slprop (lock_inv r (if b then r_mine else r_other)
(if b then r_other else r_mine))
(lock_inv r r_mine r_other)
(fun _ -> ());
()
end
else begin
rewrite_slprop (lock_inv r (if b then r_mine else r_other)
(if b then r_other else r_mine))
(lock_inv r r_other r_mine)
(fun _ -> ());
lock_inv_equiv_lemma r r_other r_mine;
rewrite_slprop (lock_inv r r_other r_mine) (lock_inv r r_mine r_other) (fun _ -> reveal_equiv (lock_inv r r_other r_mine) (lock_inv r r_mine r_other))
end
/// Releasing the shared lock invariant
inline_for_extraction noextract
let og_release (r:ref int) (r_mine r_other:ghost_ref int) (b:G.erased bool)
(l:lock (lock_inv r (if b then r_mine else r_other)
(if b then r_other else r_mine)))
: SteelT unit
(lock_inv r r_mine r_other)
(fun _ -> emp)
= if b then begin
rewrite_slprop (lock_inv r r_mine r_other)
(lock_inv r (if b then r_mine else r_other)
(if b then r_other else r_mine))
(fun _ -> ());
()
end
else begin
lock_inv_equiv_lemma r r_mine r_other;
rewrite_slprop (lock_inv r r_mine r_other) (lock_inv r r_other r_mine) (fun _ -> reveal_equiv (lock_inv r r_mine r_other) (lock_inv r r_other r_mine));
rewrite_slprop (lock_inv r r_other r_mine)
(lock_inv r (if b then r_mine else r_other)
(if b then r_other else r_mine))
(fun _ -> ())
end;
release l
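/// Increment the concrete counter at full permission; the ghost-counter
/// bookkeeping is left to the caller.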
inline_for_extraction noextract
let incr_ctr (#v:G.erased int) (r:ref int)
: SteelT unit
(pts_to r full_perm v)
(fun _ -> pts_to r full_perm (v+1))
= let n = R.read_pt r in
R.write_pt r (n+1);
rewrite_slprop (pts_to r full_perm (n + 1))
(pts_to r full_perm (v+1))
(fun _ -> ()) | {
"checked_file": "/",
"dependencies": [
"Steel.SpinLock.fsti.checked",
"Steel.Reference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "OWGCounter.fst"
} | [
{
"abbrev": true,
"full_module": "Steel.Effect.Atomic",
"short_module": "A"
},
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "Steel.Reference",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.SpinLock",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Reference",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
r: Steel.Reference.ghost_ref a ->
p1: Steel.FractionalPermission.perm ->
p2: Steel.FractionalPermission.perm
-> Steel.Effect.Steel Prims.unit | Steel.Effect.Steel | [] | [] | [
"FStar.Ghost.erased",
"Steel.Reference.ghost_ref",
"Steel.FractionalPermission.perm",
"Steel.Effect.Atomic.rewrite_slprop",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"Steel.Reference.ghost_pts_to",
"FStar.Ghost.reveal",
"Steel.Memory.mem",
"Prims.unit",
"Steel.Effect.Common.vprop",
"Steel.Effect.Common.rmem",
"Prims.eq2",
"Prims.l_True"
] | [] | false | true | false | false | false | let rewrite_perm (#a: Type) (#v: G.erased a) (r: ghost_ref a) (p1 p2: P.perm)
: Steel unit
(ghost_pts_to r p1 v)
(fun _ -> ghost_pts_to r p2 v)
(fun _ -> p1 == p2)
(fun _ _ _ -> True) =
| rewrite_slprop (ghost_pts_to r p1 v) (ghost_pts_to r p2 v) (fun _ -> ()) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.seq_slice_append_r | val seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t)
: Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) | val seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t)
: Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) | let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 102,
"end_line": 1079,
"start_col": 0,
"start_line": 1074
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
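(* [make_constant_size_parser_aux sz t f] reads the first [sz] bytes of its
   input and applies [f] to them; it fails if fewer than [sz] bytes are
   available or if [f] returns [None]. *)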
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
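(* To serialize through [parse_synth], map the value back with [g1] and reuse
   the serializer [s1] of the underlying parser. *)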
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
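(* Serializing a tagged union: emit the tag with [st], then the payload with
   the serializer selected by that tag. *)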
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
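(* Slicing the concatenation [Seq.append s1 s2] at index [Seq.length s1]
   recovers the two components; this lemma gives the left half. *)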
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s1: FStar.Seq.Base.seq t -> s2: FStar.Seq.Base.seq t
-> FStar.Pervasives.Lemma
(ensures
FStar.Seq.Base.slice (FStar.Seq.Base.append s1 s2)
(FStar.Seq.Base.length s1)
(FStar.Seq.Base.length (FStar.Seq.Base.append s1 s2)) ==
s2) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Seq.Base.seq",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.append",
"FStar.Seq.Base.length",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t)
: Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) =
| assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_dtuple2_recip | val synth_dtuple2_recip
(#t1: Type)
(#t2: (t1 -> Type))
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x) | val synth_dtuple2_recip
(#t1: Type)
(#t2: (t1 -> Type))
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x) | let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 8,
"end_line": 1206,
"start_col": 0,
"start_line": 1200
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
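(* Example (sketch): [fun (x: int) -> x + 1] is synth_injective, because
   x + 1 == x' + 1 forces x == x'. A constant function such as
   [fun (_: int) -> 0] is not, and could not be used with [parse_synth] below:
   two distinct parse results would be mapped to the same value, breaking
   injectivity of the resulting parser. *)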
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
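(* Example (sketch): with f2 = (fun (x: int) -> x + 1) and g1 = (fun (y: int) -> y - 1)
   we have f2 (g1 y) == y for every y, so [synth_inverse f2 g1] holds, and f2 is
   also [synth_injective]; these are exactly the two side conditions demanded by
   [serialize_synth] below. *)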
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
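(* Worked example (sketch): suppose [pt] reads a single tag byte and [p 0uy],
   [p 1uy] parse two different payload shapes. On an input whose first byte is
   1uy, the match above first yields (1uy, 1), then runs [p 1uy] on the
   remaining bytes and reports 1 + consumed_x bytes consumed in total; if either
   the tag or the payload fails to parse, the whole union fails. *)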
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
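(* Example (sketch): [synth_dtuple2 3 "x"] is just the dependent pair
   (| 3, "x" |), but typed as [refine_with_tag dfst 3], i.e. a pair whose first
   component is statically known to be 3. This is what lets [parse_dtuple2]
   below reuse [parse_tagged_union] with [dfst] as the tag. *)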
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: t1 -> y: LowParse.Spec.Base.refine_with_tag FStar.Pervasives.dfst x -> t2 x | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.refine_with_tag",
"Prims.dtuple2",
"FStar.Pervasives.dfst",
"FStar.Pervasives.dsnd"
] | [] | false | false | false | false | false | let synth_dtuple2_recip
(#t1: Type)
(#t2: (t1 -> Type))
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x) =
| dsnd y | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_tot_tagged_union | val serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer #k (p t))))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | val serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer #k (p t))))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_ | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 5,
"end_line": 1173,
"start_col": 0,
"start_line": 1158
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
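(* Example (sketch): for sz = 1, a decoder such as [fun s -> Some (Seq.index s 0)]
   satisfies these preconditions, since equal decoded bytes force the two
   one-byte inputs to coincide. A constant decoder like [fun _ -> Some 0uy]
   does not (all 256 one-byte inputs collapse to the same result), so it cannot
   be turned into a parser this way. *)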
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
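(* Sketch (hypothetical, not part of the original file): a total one-byte
   parser built with [make_total_constant_size_parser]. The decoder returns the
   single byte of the input; the injectivity side condition
   [make_total_constant_size_parser_precond 1 U8.t ...] is expected to be
   discharged by SMT, since equal first bytes of two one-byte sequences make
   them [Seq.equal]. *)
let parse_one_byte_example : parser (total_constant_size_parser_kind 1) U8.t =
  make_total_constant_size_parser 1 U8.t
    (fun (s: bytes { Seq.length s == 1 }) -> Seq.index s 0)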
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
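(* Example (sketch, hypothetical name): [parse_ret] consumes nothing and always
   succeeds with the given value. *)
let parse_the_answer_example : parser parse_ret_kind int = parse_ret 42
(* [parse parse_the_answer_example b] is [Some (42, 0)] for every input [b];
   [parse_empty] above is the same idea specialised to [unit]. *)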
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
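(* Worked example (sketch): [and_then_bare p (fun v -> parse_ret (f v))] first
   runs [p]; on success with (v, l) the continuation consumes 0 further bytes of
   the remaining slice, so the composite returns (f v, l + 0). This is what
   [parse_synth'] below computes directly, without the intermediate match. *)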
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
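(* Example (sketch): the family [fun (n: nat) -> parse_ret n] is cases-injective,
   since the parse result determines [n]. The family [fun (_: nat) -> parse_ret 0]
   is not: two distinct indices produce the same result on the same input, so the
   implication above fails. *)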
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
st: LowParse.Spec.Base.serializer pt ->
tag_of_data: (_: data_t -> tag_t) ->
s: (t: tag_t -> LowParse.Spec.Base.serializer (p t))
-> Prims.Pure
(LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.tot_parse_tagged_union pt
tag_of_data
p)) | Prims.Pure | [] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Base.serialize_ext",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_tagged_union",
"LowParse.Spec.Combinators.serialize_tagged_union",
"LowParse.Spec.Combinators.tot_parse_tagged_union",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.l_True"
] | [] | false | false | false | false | false | let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer #k (p t))))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) =
| serialize_ext _ (serialize_tagged_union st tag_of_data s) _ | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.synth_dtuple2 | val synth_dtuple2 (#t1: Type) (#t2: (t1 -> Type)) (x: t1) (y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) | val synth_dtuple2 (#t1: Type) (#t2: (t1 -> Type)) (x: t1) (y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) | let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 12,
"end_line": 1184,
"start_col": 0,
"start_line": 1178
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
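(* Illustrative sketch, not part of the original file: a one-byte parser can be
   obtained by decoding a length-1 slice, e.g.
   [make_constant_size_parser 1 U8.t (fun s -> Some (Seq.index s 0))];
   the [make_constant_size_parser_precond] obligation then amounts to showing
   that equal decoded bytes force equal length-1 input slices. *)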
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
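(* Behavior sketch: [parse_ret v] always succeeds, yields [v] and consumes zero
   bytes, while [serialize_ret] emits the empty byte sequence; [parse_empty]
   and [serialize_empty] are the unit instance of that pair. *)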
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
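(* Behavior sketch: [fail_parser] rejects every input, so [parse_false]
   recognizes the empty language; its serializer is vacuous because no value
   of [squash False] exists. *)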
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
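(* Worked sketch (illustrative): [and_then_bare p p'] runs [p], feeds the
   parsed value to [p'], runs the resulting parser on the remaining bytes and
   adds up the two consumed lengths. The typical instance is a length-prefixed
   payload, where a one-byte length parser is bound to a payload parser chosen
   from that length. The validated combinator [and_then] below additionally
   requires [and_then_cases_injective] of the continuation. *)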
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
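(* Worked example (illustrative): sequencing a parser of kind
   [strong_parser_kind 1 1 None] with one of kind [strong_parser_kind 4 4 None]
   yields parser_kind_low = 1 + 4 = 5 and parser_kind_high = Some (1 + 4) =
   Some 5, i.e. exactly five bytes are consumed; both subkinds being
   [ParserStrong], the composite is strong as well, and the metadata stays
   [None]. *)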
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad, generalized to apply a pure function to the value being returned
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
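(* Illustrative example, not part of the original file: the successor function
   on [nat] is injective in this sense (the proof obligation is plain
   arithmetic), whereas e.g. [fun (n: nat) -> n / 2] is not and therefore could
   not be used with [parse_synth] below. *)
let synth_injective_example_succ : squash (synth_injective (fun (n: nat) -> n + 1)) = ()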
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
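(* Note (illustrative, hypothetical names): for a single-constructor wrapper
   [Wrap], taking [f2 = Wrap] and [g1 = Wrap?.v] gives both [synth_inverse f2 g1]
   and [synth_injective f2]; this pair of conditions is exactly what
   [serialize_synth] below needs in order to serialize through [g1] and reuse
   [s1]. *)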
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
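(* Usage sketch (hypothetical instantiation): a sum such as [either a b] fits
   this interface by taking [bool] as the tag type, [tag_of_data = Inl?], and
   letting [p true] / [p false] parse the two payloads, each refined with the
   matching tag; [parse_tagged_union] then parses the tag first and dispatches
   on its value. *)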
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
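(* Layout sketch: for data [d] with tag [tg = tag_of_data d], the output is the
   tag bytes [st tg] immediately followed by the payload bytes
   [serialize (s tg) d]; the correctness argument below relies on [kt] having
   subkind [ParserStrong] so that the tag can be re-parsed from that prefix. *)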
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: t1 -> y: t2 x -> LowParse.Spec.Base.refine_with_tag FStar.Pervasives.dfst x | Prims.Tot | [
"total"
] | [] | [
"Prims.Mkdtuple2",
"LowParse.Spec.Base.refine_with_tag",
"Prims.dtuple2",
"FStar.Pervasives.dfst"
] | [] | false | false | false | false | false | let synth_dtuple2 (#t1: Type) (#t2: (t1 -> Type)) (x: t1) (y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) =
| (| x, y |) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.serialize_dtuple2_eq' | val serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
(xy: dtuple2 t1 t2)
: Tot
(squash ((serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy ==
bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) | val serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
(xy: dtuple2 t1 t2)
: Tot
(squash ((serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy ==
bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) | let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 31,
"end_line": 1308,
"start_col": 0,
"start_line": 1296
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad, generalized to apply a pure function to the value being returned
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: (x: t1 -> LowParse.Spec.Base.serializer (p2 x)) ->
xy: Prims.dtuple2 t1 t2
-> Prims.squash (LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_dtuple2 s1 s2)
xy ==
LowParse.Spec.Combinators.bare_serialize_dtuple2 s1 s2 xy) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.dtuple2",
"LowParse.Spec.Combinators.serialize_dtuple2_eq",
"Prims.squash",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_dtuple2",
"LowParse.Spec.Combinators.serialize_dtuple2",
"LowParse.Spec.Combinators.bare_serialize_dtuple2"
] | [] | false | false | true | false | false | let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
(xy: dtuple2 t1 t2)
: Tot
(squash ((serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy ==
bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) =
| serialize_dtuple2_eq s1 s2 xy | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_parse_dtuple2 | val bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
: Tot (bare_parser (dtuple2 t1 t2)) | val bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
: Tot (bare_parser (dtuple2 t1 t2)) | let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 1256,
"start_col": 0,
"start_line": 1239
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.parser k1 t1 -> p2: (x: t1 -> LowParse.Spec.Base.parser k2 (t2 x))
-> LowParse.Spec.Base.bare_parser (Prims.dtuple2 t1 t2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"Prims.dtuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Mkdtuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.bare_parser"
] | [] | false | false | false | false | false | let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
: Tot (bare_parser (dtuple2 t1 t2)) =
| fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
(match parse (p2 x1) b' with
| Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None)
| _ -> None | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.seq_slice_append_l | val seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t)
: Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) | val seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t)
: Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) | let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 72,
"end_line": 1072,
"start_col": 0,
"start_line": 1067
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
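(* Editor's sketch of the kind arithmetic above, with an illustrative name:
   sequencing a 1-byte strong kind with a 2-to-4-byte strong kind yields a
   kind with lengths in [3, 5], no metadata, and the strong subkind. *)
let example_combined_kind : parser_kind =
  and_then_kind (strong_parser_kind 1 1 None) (strong_parser_kind 2 4 None)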
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
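(* Editor's sketch (illustrative name only): `parse_fret` is `parse_ret`
   with the returned value first passed through a (possibly ghost)
   function. *)
let example_parse_fret : parser parse_ret_kind int =
  parse_fret (fun (b: bool) -> if b then 1 else 0) true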
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
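(* Editor's sketch (illustrative name only): `parse_synth` re-types a
   parser's result through an injective function; wrapping the result of
   `parse_empty` in `Some` is injective by constructor injectivity. *)
let example_parse_synth : parser parse_ret_kind (option unit) =
  parse_synth parse_empty (fun (x: unit) -> Some x)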
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
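(* Editor's sketch of the tagged-union shape, with illustrative names and
   deliberately degenerate parsers: the tag is a boolean recording whether
   an `option unit` payload is `Some`, and both tag and payload are parsed
   by constant parsers, so nothing is read from the input. A realistic
   instance would decode the tag from the bytes instead. *)
let example_tag_of_data (x: option unit) : GTot bool = Some? x
let example_payload (b: bool)
  : Tot (parser parse_ret_kind (refine_with_tag example_tag_of_data b))
= if b
  then parse_ret (Some () <: refine_with_tag example_tag_of_data b)
  else parse_ret (None <: refine_with_tag example_tag_of_data b)
let example_parse_tagged
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (option unit)
= parse_tagged_union (parse_ret true) example_tag_of_data example_payload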
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
  Seq.append (st tg) (serialize (s tg) d)
| {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
}
| [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
]
| {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
}
| false
| s1: FStar.Seq.Base.seq t -> s2: FStar.Seq.Base.seq t
  -> FStar.Pervasives.Lemma
     (ensures FStar.Seq.Base.slice (FStar.Seq.Base.append s1 s2) 0 (FStar.Seq.Base.length s1) == s1)
| FStar.Pervasives.Lemma
| [ "lemma" ]
| []
| [
    "FStar.Seq.Base.seq",
    "Prims._assert",
    "FStar.Seq.Base.equal",
    "FStar.Seq.Base.slice",
    "FStar.Seq.Base.append",
    "FStar.Seq.Base.length",
    "Prims.unit",
    "Prims.l_True",
    "Prims.squash",
    "Prims.eq2",
    "Prims.Nil",
    "FStar.Pervasives.pattern"
  ]
| []
| true | false | true | false | false
| let seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t)
    : Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) =
| assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
| false
LowParse.Spec.Combinators.fsti
| LowParse.Spec.Combinators.lift_serializer'
| val lift_serializer'
    (#k: parser_kind)
    (#t: Type)
    (#f: (unit -> GTot (parser k t)))
    (s: (unit -> GTot (serializer (f ()))))
  : Tot (bare_serializer t)
| val lift_serializer'
    (#k: parser_kind)
    (#t: Type)
    (#f: (unit -> GTot (parser k t)))
    (s: (unit -> GTot (serializer (f ()))))
  : Tot (bare_serializer t)
| let lift_serializer'
    (#k: parser_kind)
    (#t: Type)
    (#f: unit -> GTot (parser k t))
    (s: unit -> GTot (serializer (f ())))
  : Tot (bare_serializer t)
  = fun (x: t) -> serialize (s ()) x
| {
    "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
    "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
    "git_url": "https://github.com/project-everest/everparse.git",
    "project_name": "everparse"
  }
| {
    "end_col": 34,
    "end_line": 1772,
    "start_col": 0,
    "start_line": 1766
  }
| module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
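(* Editor's sketch (deliberately degenerate, illustrative name only): the
   shape of a constant-size parser. This decoder rejects every 1-byte
   input, which makes the injectivity precondition vacuously true; a real
   decoder would inspect the bytes it is given. *)
let example_reject_one_byte : parser (constant_size_parser_kind 1) bool =
  make_constant_size_parser 1 bool (fun _ -> None)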
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
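(* Editor's sketch (illustrative name only): a dependent pair whose second
   parser may depend on the first value. With constant unit parsers the
   dependency is trivial, but it shows the shape of the combinator. *)
let example_parse_dtuple2
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (dtuple2 unit (fun _ -> unit))
= parse_dtuple2 parse_empty (fun (_: unit) -> parse_empty)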
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
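(* Editor's sketch (illustrative name only): the non-dependent special
   case, pairing two parsers; with two empty parsers the resulting parser
   consumes nothing and returns ((), ()). *)
let example_parse_pair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (unit * unit)
= parse_empty `nondep_then` parse_empty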
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
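(* Editor's sketch (illustrative name only): a serializer for a pair of
   empty parsers. This relies on the left kind having the strong subkind,
   which parse_ret_kind has by construction; the serializer always emits
   the empty byte sequence. *)
let example_serialize_pair : serializer (parse_empty `nondep_then` parse_empty) =
  serialize_empty `serialize_nondep_then` serialize_empty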
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf
let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input
let serialize_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: Lemma
(let output = serialize_strengthen' p2 prf s input in
parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output))
= ()
let serialize_strengthen
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
: Tot (serializer (parse_strengthen p1 p2 prf))
= Classical.forall_intro (serialize_strengthen_correct p2 prf s);
serialize_strengthen' p2 prf s
let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 =
let y1 = f1 x in
f2 y1
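(* Illustrative usage sketch, not part of the original interface: [compose]
   chains two (possibly ghost) functions left to right. The name
   [increment_then_decrement] is introduced here only as an example. *)
let increment_then_decrement : int -> GTot int
= (fun (x: int) -> x + 1) `compose` (fun (y: int) -> y - 1)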
val make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
(** Tot vs. Ghost *)
unfold
let lift_parser'
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (bare_parser t)
= fun (input: bytes) -> parse (f ()) input
let lift_parser_correct
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Lemma
(parser_kind_prop k (lift_parser' f))
= parser_kind_prop_ext k (f ()) (lift_parser' f)
let lift_parser
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (parser k t)
= lift_parser_correct f;
lift_parser' f | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: (_: Prims.unit -> Prims.GTot (LowParse.Spec.Base.serializer (f ())))
-> LowParse.Spec.Base.bare_serializer t | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"Prims.unit",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.serialize",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.bare_serializer"
] | [] | false | false | false | false | false | let lift_serializer'
(#k: parser_kind)
(#t: Type)
(#f: (unit -> GTot (parser k t)))
(s: (unit -> GTot (serializer (f ()))))
: Tot (bare_serializer t) =
| fun (x: t) -> serialize (s ()) x | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_dtuple2_eq' | val parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) | val parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) | let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 26,
"end_line": 1268,
"start_col": 0,
"start_line": 1258
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
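(* Illustrative usage sketch, not part of the original file: [parse_ret]
   consumes no input and always succeeds with the given value. The name
   [parse_the_answer] is introduced here only as an example. *)
let parse_the_answer : parser parse_ret_kind int = parse_ret 42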
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
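(* Illustrative usage sketch, not part of the original file: [fail_parser] can
   be instantiated at any result type for a kind satisfying
   [fail_parser_kind_precond]. The name [parse_always_fails] is introduced
   here only as an example. *)
let parse_always_fails : parser parse_false_kind int = fail_parser parse_false_kind int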
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
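(* Illustrative usage sketch, not part of the original interface:
   [parse_synth] re-types a parser's result through an injective function,
   here the [Some] constructor. The name [parse_some_answer] is introduced
   here only as an example. *)
let parse_some_answer : parser parse_ret_kind (option int)
= parse_synth (parse_ret 42) (fun (x: int) -> Some x)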
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
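(* Illustrative usage sketch, not part of the original file: a dependent pair
   of two trivial parsers, where the type of the second component is indexed
   (here constantly) by the first value. The name [parse_trivial_dtuple2] is
   introduced here only as an example. *)
let parse_trivial_dtuple2
: parser (and_then_kind parse_ret_kind parse_ret_kind) (dtuple2 unit (fun _ -> unit))
= parse_dtuple2 parse_empty (fun _ -> parse_empty)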
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k1 t1 ->
p2: (x: t1 -> LowParse.Spec.Base.parser k2 (t2 x)) ->
b: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.parse_dtuple2 p1 p2) b ==
LowParse.Spec.Combinators.bare_parse_dtuple2 p1 p2 b) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_dtuple2_eq",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"Prims.dtuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Combinators.parse_dtuple2",
"LowParse.Spec.Combinators.bare_parse_dtuple2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) =
| parse_dtuple2_eq p1 p2 b | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_dtuple2 | val parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) | val parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) | let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 57,
"end_line": 1197,
"start_col": 0,
"start_line": 1186
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
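(* Worked example of the kind arithmetic above (illustrative note): combining
   a kind with bounds (1, Some 1) and one with bounds (4, Some 4) gives bounds
   (5, Some 5); the result is total only if both sides are total, and strong
   only if both sides are strong. *)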
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
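(* Informal example (illustrative note): by [and_then_eq],
   [parse (and_then p p') b] computes like [and_then_bare p p' b]: if [p]
   consumes [l] bytes of [b] yielding [v], and [p' v] consumes [l'] bytes of
   the rest yielding [v'], the combined parser returns [Some (v', l + l')];
   if either step fails it returns [None]. The [and_then_cases_injective]
   hypothesis is what keeps the combined parser injective. *)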
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
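(* Informal example (illustrative note): [parse_synth p1 f2] only re-tags
   results through the injective [f2] and never touches the input bytes; by
   [parse_synth_eq], whenever [parse p1 b == Some (x, n)] the synthesized
   parser returns [Some (f2 x, n)], and it fails exactly when [p1] fails. *)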
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
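(* Illustrative note: by [serialize_synth_eq], serializing through a synth
   pair just serializes the preimage, i.e.
   [serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)];
   the side conditions ask that [g1] invert [f2] and that [f2] be injective. *)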
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
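(* Informal example (illustrative note): [parse_tagged_union_eq] above says
   the combined parser first reads a tag with [pt], then runs the payload
   parser [p tg] on the remaining bytes; e.g. a 1-byte tag followed by a
   4-byte payload consumes 5 bytes in total, and the payload value is
   returned retagged as [data_t]. *)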
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
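(* Illustrative note: the serialized form of a tagged union is literally the
   serialized tag followed by the serialized payload; the two
   [seq_slice_append] lemmas below recover each half, and the correctness
   proof additionally requires [kt] to be [ParserStrong] so the tag parser
   cannot read past the tag bytes. *)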
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p1: LowParse.Spec.Base.parser k1 t1 -> p2: (x: t1 -> LowParse.Spec.Base.parser k2 (t2 x))
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind k1 k2) (Prims.dtuple2 t1 t2) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_tagged_union",
"Prims.dtuple2",
"FStar.Pervasives.dfst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.synth_dtuple2",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | false | false | false | false | false | let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1 -> parser k2 (t2 x)))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) =
| parse_tagged_union p1 dfst (fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.tot_parse_tagged_union_eq | val tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input ==
(match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None)) | val tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input ==
(match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None)) | let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 53,
"end_line": 1050,
"start_col": 0,
"start_line": 1031
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
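(* Hypothetical instance (illustrative note, not from the original
   interface): over [int], taking [f2 = fun n -> n + 1] and
   [g1 = fun n -> n - 1] gives [synth_inverse f2 g1], since
   [(n - 1) + 1 == n]; the symmetric fact [(n + 1) - 1 == n] gives
   [synth_inverse g1 f2], from which [synth_injective f2] follows by
   [synth_inverse_synth_injective] below. *)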
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
pt: LowParse.Spec.Base.tot_parser kt tag_t ->
tag_of_data: (_: data_t -> tag_t) ->
p:
(t: tag_t
-> LowParse.Spec.Base.tot_parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.tot_parse_tagged_union pt tag_of_data p)
input ==
(match LowParse.Spec.Base.parse pt input with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ tg consumed_tg) ->
let input_tg = FStar.Seq.Base.slice input consumed_tg (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (p tg) input_tg with
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some (x, consumed_tg + consumed_x)
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None)
<:
FStar.Pervasives.Native.option (data_t * LowParse.Spec.Base.consumed_length input))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.parse_tagged_union_eq",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Combinators.tot_parse_tagged_union",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (tot_parser k (refine_with_tag tag_of_data t))))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input ==
(match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None)) =
| parse_tagged_union_eq #kt pt tag_of_data #k p input | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_parse_strengthen | val bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1{p2 x})) | val bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1{p2 x})) | let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 1632,
"start_col": 0,
"start_line": 1619
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
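(* Illustrative sketch, not part of the original file: [parse_ret v] consumes
   no input and always succeeds with [v]. The (hypothetical) parser below
   always yields the integer 42 without reading any bytes. *)
let example_parse_forty_two : parser parse_ret_kind int = parse_ret 42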
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
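(* Note (added commentary, not in the original file): cases-injectivity asks
   that two successful runs of the dependent parsers [p' x1] and [p' x2] can
   only return the same value when [x1 == x2]; together with injectivity of
   [p] and of each [p' x], it makes the bind [and_then_bare p p'] injective. *)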
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
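(* Note (added commentary, not in the original file): the composed kind adds
   the byte-count bounds of its two components, combines their metadata via
   [and_then_metadata], and computes the subkind case by case: the result
   consumes all input whenever the second parser does, is strong when both
   parsers are strong, and inherits the first parser's subkind when the
   second parser is strong and consumes no bytes. *)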
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
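(* Illustrative sketch, not part of the original file: [parse_synth]
   post-processes the result of a parser through an injective function.
   The (hypothetical) parser below maps a constant integer through [+ 1]. *)
let example_parse_synth_incr : parser parse_ret_kind int =
  parse_synth (parse_ret 41) (fun (x: int) -> x + 1)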
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
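(* Note (added commentary, not in the original file): serializing through a
   synth maps the data back with [g1] and then uses the underlying serializer
   [s1]; the [synth_inverse] and [synth_injective] hypotheses are what make
   this round-trip with [parse_synth p1 f2]. *)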
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
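(* Note (added commentary, not in the original file): a tagged union is parsed
   in two steps: the tag parser [pt] runs first, and the payload parser
   [p tg] selected by the parsed tag then runs on the remaining bytes; the
   equation above spells out exactly this computation. *)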
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
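(* Note (added commentary, not in the original file): serialization mirrors
   parsing: the serialized tag comes first and the serialized payload is
   appended after it. The correctness proof below needs the tag parser to be
   [ParserStrong], so that parsing the tag from the concatenation consumes
   exactly the bytes produced for the tag. *)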
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
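(* Note (added commentary, not in the original file): a dependent pair is a
   special case of a tagged union whose tag is the first component itself
   ([dfst]); for a given first component [x], the payload parser reads the
   second component and [synth_dtuple2 x] rewraps it into the pair. *)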
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
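(* Illustrative sketch, not part of the original file: [nondep_then] sequences
   two independent parsers and returns the pair of their results. The
   (hypothetical) parser below pairs two zero-byte constant parsers. *)
let example_parse_pair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (int * bool)
= nondep_then (parse_ret 0) (parse_ret true)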
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
p2: (_: t1 -> Prims.GTot Type0) ->
prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2
-> LowParse.Spec.Base.bare_parser (x: t1{p2 x}) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_strengthen_prf",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Base.bare_parser"
] | [] | false | false | false | false | false | let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1{p2 x})) =
| fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
      let (x' : t1 { p2 x' }) = x in
Some (x', consumed)
| _ -> None | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_parse_strengthen_correct | val bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) | val bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) | let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
() | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 4,
"end_line": 1671,
"start_col": 0,
"start_line": 1658
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
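(* A worked instance of the bound arithmetic above: sequencing a kind with
   parser_kind_low = 4 and parser_kind_high = Some 4 with a kind with
   parser_kind_low = 0 and parser_kind_high = Some 2 yields
   parser_kind_low = 4 and parser_kind_high = Some 6; if either high bound
   is None, the combined high bound is None.  The combined subkind is
   ParserStrong only when both arguments are ParserStrong. *)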
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
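(* An illustrative, hypothetical instance: fun (x: nat) -> x + 1 is
   synth_injective, whereas fun (x: nat) -> x % 2 is not, since it maps both
   0 and 2 to 0 and would therefore let a synthesized parser forget which
   value was actually parsed. *)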
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
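(* A hypothetical instantiation, for intuition only: in a message format with
   a one-byte message type, pt parses the type byte, tag_of_data recovers the
   type from a complete message, and p selects the payload parser for each
   type.  The equation above spells out the two-step reading: parse the tag,
   then parse the remaining bytes with the parser chosen by that tag. *)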
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
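(* Note on the layout: the serializer mirrors the parser by emitting the tag
   bytes first and the payload bytes after them.  The correctness proof below
   requires kt.parser_kind_subkind == Some ParserStrong, so that re-parsing
   the tag from the concatenation consumes exactly the tag bytes and nothing
   of the payload. *)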
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
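(* parse_dtuple2 is parse_tagged_union specialized to the case where the tag
   is the first component itself (dfst).  A hypothetical use is a
   length-prefixed payload: p1 parses a length n and p2 n parses a payload
   whose type depends on n, yielding the dependent pair (| n, payload |). *)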
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
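(* A minimal usage sketch; the name example_parse_unit_pair is illustrative
   only and not part of the combinator library.  It sequences two parsers
   with nondep_then: parse_empty consumes no bytes, so the resulting pair
   parser also consumes no bytes and always succeeds with ((), ()). *)
let example_parse_unit_pair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (unit * unit)
= nondep_then parse_empty parse_empty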
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
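(* The serialize_nondep_then_upd_* lemmas below justify in-place updates of a
   serialized pair: if only one component changes and its serialization keeps
   the same length, the bytes of the pair can be patched with seq_upd_seq
   (or seq_upd_bw_seq, which indexes from the end of the sequence) instead of
   re-serializing the whole pair. *)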
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
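(* A hypothetical use of strengthening: if p1 parses a single byte as a nat,
   prf can establish that every successfully parsed value is at most 255, so
   bare_parse_strengthen re-types the result as x:t1{p2 x} without
   re-examining the input bytes. *)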
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
p2: (_: t1 -> Prims.GTot Type0) ->
prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.injective (LowParse.Spec.Combinators.bare_parse_strengthen p1 p2 prf) /\
LowParse.Spec.Base.parser_kind_prop k
(LowParse.Spec.Combinators.bare_parse_strengthen p1 p2 prf)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_strengthen_prf",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.bare_parse_strengthen",
"LowParse.Spec.Combinators.bare_parse_strengthen_injective",
"LowParse.Spec.Combinators.bare_parse_strengthen_no_lookahead",
"Prims.l_True",
"Prims.squash",
"Prims.l_and",
"LowParse.Spec.Base.injective",
"LowParse.Spec.Base.parser_kind_prop",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) =
| parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
() | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_serialize_tagged_union_correct | val bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Lemma (requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures
(serializer_correct (parse_tagged_union pt tag_of_data p)
(bare_serialize_tagged_union st tag_of_data s))) | val bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Lemma (requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures
(serializer_correct (parse_tagged_union pt tag_of_data p)
(bare_serialize_tagged_union st tag_of_data s))) | let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 1126,
"start_col": 0,
"start_line": 1081
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
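(* A minimal usage sketch; the name example_parse_ret_nat is illustrative
   only.  parse_ret injects a pure value into the parser monad: the resulting
   parser consumes zero bytes and always succeeds with that value. *)
let example_parse_ret_nat : parser parse_ret_kind nat = parse_ret 42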
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
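(* Intuition for the condition above: the value produced by the second parser
   must determine the value produced by the first.  For instance, with a
   boolean first result, p' true and p' false must never return the same
   value on any inputs; otherwise and_then p p' could map two distinct byte
   strings to the same result and would not be injective. *)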
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
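(* For intuition (an illustrative sketch): the serialized form of a tagged value
   is just the tag bytes followed by the payload bytes, i.e.
     bare_serialize_tagged_union st tag_of_data s d
       == Seq.append (st (tag_of_data d)) (serialize (s (tag_of_data d)) d)
   by definition; the two Seq.slice lemmas below recover each half of that
   concatenation. *)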
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
st: LowParse.Spec.Base.serializer pt ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
s: (t: tag_t -> LowParse.Spec.Base.serializer (p t))
-> FStar.Pervasives.Lemma
(requires
Mkparser_kind'?.parser_kind_subkind kt ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong)
(ensures
LowParse.Spec.Base.serializer_correct (LowParse.Spec.Combinators.parse_tagged_union pt
tag_of_data
p)
(LowParse.Spec.Combinators.bare_serialize_tagged_union st tag_of_data s)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.refine_with_tag",
"FStar.Classical.forall_intro",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Combinators.bare_serialize_tagged_union",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Combinators.parse_tagged_union",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Combinators.seq_slice_append_r",
"Prims._assert",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.append",
"Prims.nat",
"LowParse.Spec.Base.injective_postcond",
"LowParse.Spec.Base.injective_precond",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"LowParse.Spec.Base.no_lookahead_on_postcond",
"LowParse.Spec.Base.no_lookahead_on_precond",
"LowParse.Spec.Combinators.seq_slice_append_l",
"FStar.Seq.Base.slice",
"Prims.op_LessThanOrEqual",
"LowParse.Spec.Base.no_lookahead_on",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_tagged_union_eq",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Spec.Base.serializer_correct",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | false | false | true | false | false | let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Lemma (requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures
(serializer_correct (parse_tagged_union pt tag_of_data p)
(bare_serialize_tagged_union st tag_of_data s))) =
| let prf (x: data_t)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) ==
Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x))) =
parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let u:refine_with_tag tag_of_data t = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let Some (_, len') = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt
(serialize st t)
(bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt
(serialize st t)
(bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let Some (x1, len1) = v1 in
let Some (x1', len1') = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x ==
Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_parse_strengthen_no_lookahead | val bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma (no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf)) | val bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma (no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf)) | let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 88,
"end_line": 1643,
"start_col": 0,
"start_line": 1634
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
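(* Hypothetical instantiation (for illustration only; none of these names are
   defined in this file): a one-byte boolean parser could be built as
     make_constant_size_parser 1 bool
       (fun s -> match U8.v (Seq.index s 0) with
                 | 0 -> Some false | 1 -> Some true | _ -> None)
   The precondition holds because the two accepted byte values decode to
   distinct booleans, so equal results force equal input bytes. *)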
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
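(* For intuition: parse_ret v consumes no input and always succeeds with v,
   so parse (parse_ret 42) b == Some (42, 0) for any input b (an illustrative
   instance of the definition above, not a lemma stated in this file). Its kind
   records exactly 0 bytes consumed and total success. *)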
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
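(* For intuition (informal trace): and_then_bare p p' runs p on b; on success
   (v, l) it runs the dependent parser p' v on the remaining bytes and adds the
   consumed lengths. E.g. if p consumes a 1-byte tag and p' tag then consumes a
   4-byte body, the combined parser consumes 5 bytes. *)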
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
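(* Informally: cases injectivity means the chosen case x can be recovered from
   the value produced by p' x alone. A hypothetical counterexample: if
   p' x = parse_ret 0 for every x, then two distinct cases parse to the same
   value, the implication above fails, and and_then would not be injective. *)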
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
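(* Worked example (for illustration): combining a kind with
   parser_kind_low = 1, parser_kind_high = Some 1 (say, a one-byte tag) and a
   kind with parser_kind_low = 0, parser_kind_high = None (an unbounded
   payload) gives parser_kind_low = 1 and parser_kind_high = None, since one of
   the highs is missing; metadata and subkind follow the rules spelled out
   above. *)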
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
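(* For intuition: parse_synth' only post-processes the value produced by p1
   with f2 and leaves the consumed length unchanged. E.g. with a hypothetical
   constructor mk_point : (U8.t * U8.t) -> point (not defined here),
   parse_synth p_pair mk_point parses the pair and repackages it as a point;
   injectivity of mk_point is exactly the synth_injective side condition. *)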
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
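(* Intuition, over a hypothetical single-constructor type C (not defined here):
   if f2 x = C x and g1 (C x) = x, then f2 (g1 y) == y and g1 (f2 x) == x, so
   synth_inverse holds in both directions and, as the lemmas above state, each
   direction yields synth_injective for the other function. *)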
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
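(* Proof idea, informally: since kt is a strong (prefix-stable) kind, the tag
   parser reads back exactly the tag bytes from the front of the concatenation
   of tag and payload, and the payload parser then consumes the rest;
   seq_slice_append_l and seq_slice_append_r identify the two halves. *)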
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
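(* For intuition: a dependent pair is parsed as a tagged union whose "tag" is
   the first component (dfst) and whose payload parser at x : t1 is p2 x,
   resynthesized into (| x, y |). A typical (hypothetical) use is a
   length-prefixed payload, with t1 a length field and t2 x a type indexed by
   that length. *)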
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
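(* For intuition: the pair serializer is plain concatenation of the two
   serialized components; the upd_left / upd_right lemmas below describe how
   updating one component rewrites exactly the corresponding slice of that
   concatenation. *)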
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
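(* For intuition: a parse_strengthen_prf p1 p2 value packages a proof that any
   value successfully parsed by p1 satisfies p2; bare_parse_strengthen below
   uses it to re-type the parsed value at the refinement (x: t1 { p2 x })
   without changing the bytes consumed. *)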
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
p2: (_: t1 -> Prims.GTot Type0) ->
prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.no_lookahead p1 ==>
LowParse.Spec.Base.no_lookahead (LowParse.Spec.Combinators.bare_parse_strengthen p1 p2 prf)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_strengthen_prf",
"Prims._assert",
"Prims.l_Forall",
"LowParse.Bytes.bytes",
"Prims.l_imp",
"LowParse.Spec.Base.no_lookahead_on",
"LowParse.Spec.Base.bare_parser",
"LowParse.Spec.Combinators.bare_parse_strengthen",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"LowParse.Spec.Base.no_lookahead",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma (no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf)) =
| let p':bare_parser (x: t1{p2 x}) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1: bytes) (b2: bytes). no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_filter_payload_kind | val parse_filter_payload_kind:parser_kind | val parse_filter_payload_kind:parser_kind | let parse_filter_payload_kind : parser_kind =
strong_parser_kind 0 0 None | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 29,
"end_line": 1810,
"start_col": 0,
"start_line": 1809
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
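(* Editorial sketch, not part of the original interface: [parse_ret] consumes no input
   and always succeeds with its argument, so any value yields a zero-byte parser.
   The name [parse_forty_two] is illustrative only. *)
let parse_forty_two : parser parse_ret_kind int = parse_ret 42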
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
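(* Editorial sketch, not part of the original interface: [parse_synth] re-types a
   parser's result through an injective function. The helper [synth_box] and the
   parser below are illustrative only; the [synth_injective] side condition follows
   from the injectivity of the [Some] constructor. *)
let synth_box (x: int) : GTot (option int) = Some x
let parse_boxed_ret : parser parse_ret_kind (option int) =
  parse_synth (parse_ret 0) synth_box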
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
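(* Editorial sketch, not part of the original interface: [parse_dtuple2] sequences a
   tag parser with a payload parser that may depend on the tag. The type family
   [const_unit] and the parser below are illustrative only (here the payload happens
   not to use its tag). *)
let const_unit (_: int) : Tot Type = unit
let parse_tag_and_unit
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (dtuple2 int const_unit)
  = parse_dtuple2 (parse_ret 0) (fun _ -> parse_empty)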
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
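(* Editorial sketch, not part of the original interface: [nondep_then] is the
   non-dependent special case, pairing two independent parsers; the name below is
   illustrative only. *)
let parse_ret_pair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (int * bool)
  = nondep_then (parse_ret 0) (parse_ret true)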
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf
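(* Editorial sketch, not part of the original interface: [parse_strengthen] refines a
   parser's result type once [prf] establishes the refinement on every successful
   parse. The refinement [always_true] is trivially provable, so the constant lemma
   discharges the obligation; the names are illustrative only. *)
let always_true (x: int) : GTot Type0 = True
let parse_strengthened_ret
  : parser parse_ret_kind (x: int { always_true x })
  = parse_strengthen (parse_ret 0) always_true (fun _ _ _ -> ())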
let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input
let serialize_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: Lemma
(let output = serialize_strengthen' p2 prf s input in
parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output))
= ()
let serialize_strengthen
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
: Tot (serializer (parse_strengthen p1 p2 prf))
= Classical.forall_intro (serialize_strengthen_correct p2 prf s);
serialize_strengthen' p2 prf s
let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 =
let y1 = f1 x in
f2 y1
val make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
(** Tot vs. Ghost *)
unfold
let lift_parser'
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (bare_parser t)
= fun (input: bytes) -> parse (f ()) input
let lift_parser_correct
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Lemma
(parser_kind_prop k (lift_parser' f))
= parser_kind_prop_ext k (f ()) (lift_parser' f)
let lift_parser
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (parser k t)
= lift_parser_correct f;
lift_parser' f
unfold
let lift_serializer'
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Tot (bare_serializer t)
= fun (x: t) -> serialize (s ()) x
let lift_serializer_correct
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Lemma
(serializer_correct (lift_parser f) (lift_serializer' s))
= ()
let lift_serializer
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Tot (serializer #k #t (lift_parser f))
= lift_serializer_correct #k #t #f s;
lift_serializer' #k #t #f s
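(* Editorial sketch, not part of the original interface: [lift_parser] turns a ghost
   thunk of a parser into a total parser with the same behaviour; the name below is
   illustrative only. *)
let parse_empty_lifted : parser parse_ret_kind unit = lift_parser (fun _ -> parse_empty)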
(** Refinements *)
// unfold
inline_for_extraction
let parse_filter_kind (k: parser_kind) : Tot parser_kind =
{
parser_kind_low = k.parser_kind_low;
parser_kind_high = k.parser_kind_high;
parser_kind_metadata =
begin match k.parser_kind_metadata with
| Some ParserKindMetadataFail -> Some ParserKindMetadataFail
| _ -> None
end;
parser_kind_subkind = k.parser_kind_subkind;
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowParse.Spec.Base.parser_kind | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.strong_parser_kind",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Base.parser_kind_metadata_some"
] | [] | false | false | false | true | false | let parse_filter_payload_kind:parser_kind =
| strong_parser_kind 0 0 None | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_filter_refine | val parse_filter_refine : f: (_: t -> Prims.GTot Prims.bool) -> Type | let parse_filter_refine (#t: Type) (f: (t -> GTot bool)) =
(x: t { f x == true } ) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 25,
"end_line": 1813,
"start_col": 0,
"start_line": 1812
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
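(* Hypothetical usage sketch, not part of the original file: parse_ret applied to a concrete
   value yields a parser that consumes no input and always succeeds with that value. The name
   parse_forty_two is ours. *)
let parse_forty_two : parser parse_ret_kind int = parse_ret 42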
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
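(* Hypothetical sketch, not part of the original file: at the bare level the bind carries no
   injectivity side conditions; this simply runs parse_empty twice in sequence, ignoring the
   first (unit) result. The name bare_two_empties is ours. *)
let bare_two_empties : bare_parser unit =
  and_then_bare parse_empty (fun (_: unit) -> parse_empty)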
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad, combined with a map over the returned value
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
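(* Hypothetical usage sketch, not part of the original file: parse_synth re-types the result
   of a parser through an injective function; a single-constructor wrapper is injective by
   constructor injectivity, and synth_injective_intro can be invoked explicitly if the solver
   needs help. The names boxed_unit, BoxUnit and parse_boxed_unit are ours. *)
type boxed_unit = | BoxUnit : unit -> boxed_unit
let parse_boxed_unit : parser parse_ret_kind boxed_unit =
  parse_synth parse_empty (fun (u: unit) -> BoxUnit u)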
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
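(* Hypothetical sketch, not part of the original file: a degenerate dependent pair whose
   second component's type ignores the first; both halves reuse parse_empty. If the
   elaborator needs help, the implicit type family (fun (_: unit) -> unit) can be supplied
   explicitly. The name parse_unit_dpair is ours. *)
let parse_unit_dpair
  : parser (and_then_kind parse_ret_kind parse_ret_kind) (dtuple2 unit (fun (_: unit) -> unit))
  = parse_dtuple2 parse_empty (fun _ -> parse_empty)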
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
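(* Hypothetical usage sketch, not part of the original file: nondep_then sequences two
   independent parsers and pairs their results; the resulting kind adds the byte bounds of
   the components. The name parse_unit_pair is ours. *)
let parse_unit_pair : parser (and_then_kind parse_ret_kind parse_ret_kind) (unit * unit) =
  parse_empty `nondep_then` parse_empty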
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
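(* Hypothetical sketch, not part of the original file: the serializer matching the
   parse_unit_pair sketch above; parse_ret_kind has subkind ParserStrong, which is what the
   refinement on s1 requires. The name serialize_unit_pair is ours. *)
let serialize_unit_pair : serializer (parse_empty `nondep_then` parse_empty) =
  serialize_nondep_then serialize_empty serialize_empty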
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf
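(* Hypothetical sketch, not part of the original file: strengthening a parser with a
   trivially true predicate; the proof obligation handed to prf is vacuous, so a unit
   lemma body suffices. The name parse_empty_trivially_refined is ours. *)
let parse_empty_trivially_refined =
  parse_strengthen parse_empty (fun (_: unit) -> True) (fun _ _ _ -> ())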
let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input
let serialize_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: Lemma
(let output = serialize_strengthen' p2 prf s input in
parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output))
= ()
let serialize_strengthen
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
: Tot (serializer (parse_strengthen p1 p2 prf))
= Classical.forall_intro (serialize_strengthen_correct p2 prf s);
serialize_strengthen' p2 prf s
let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 =
let y1 = f1 x in
f2 y1
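(* Hypothetical sketch, not part of the original file: compose applies f1 first and then f2,
   so composing a ghost successor function with itself adds two. The names gplus_one and
   gplus_two are ours. *)
let gplus_one (x: nat) : GTot nat = x + 1
let gplus_two : (nat -> GTot nat) = gplus_one `compose` gplus_one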
val make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
(** Tot vs. Ghost *)
unfold
let lift_parser'
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (bare_parser t)
= fun (input: bytes) -> parse (f ()) input
let lift_parser_correct
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Lemma
(parser_kind_prop k (lift_parser' f))
= parser_kind_prop_ext k (f ()) (lift_parser' f)
let lift_parser
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (parser k t)
= lift_parser_correct f;
lift_parser' f
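(* Hypothetical sketch, not part of the original file: a Tot parser can be wrapped in a
   ghost thunk and lifted back without changing its kind; the lifted parser behaves like
   parse_empty. The name parse_empty_lifted is ours. *)
let parse_empty_lifted : parser parse_ret_kind unit = lift_parser (fun _ -> parse_empty)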
unfold
let lift_serializer'
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Tot (bare_serializer t)
= fun (x: t) -> serialize (s ()) x
let lift_serializer_correct
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Lemma
(serializer_correct (lift_parser f) (lift_serializer' s))
= ()
let lift_serializer
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Tot (serializer #k #t (lift_parser f))
= lift_serializer_correct #k #t #f s;
lift_serializer' #k #t #f s
(** Refinements *)
// unfold
inline_for_extraction
let parse_filter_kind (k: parser_kind) : Tot parser_kind =
{
parser_kind_low = k.parser_kind_low;
parser_kind_high = k.parser_kind_high;
parser_kind_metadata =
begin match k.parser_kind_metadata with
| Some ParserKindMetadataFail -> Some ParserKindMetadataFail
| _ -> None
end;
parser_kind_subkind = k.parser_kind_subkind;
}
// unfold
let parse_filter_payload_kind : parser_kind =
  strong_parser_kind 0 0 None
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: t -> Prims.GTot Prims.bool) -> Type | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Prims.eq2"
let parse_filter_refine (#t: Type) (f: (t -> GTot bool)) =
  (x: t { f x == true })
let serialize_filter'
  (#k: parser_kind)
  (#t: Type)
  (#p: parser k t)
  (s: serializer p)
  (f: (t -> GTot bool))
: Tot (bare_serializer (x: t { f x == true } ))
= fun (input: t { f input == true } ) -> s input
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 48,
"end_line": 1887,
"start_col": 0,
"start_line": 1880
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
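(* Note: [and_then] and [tot_and_then] are exposed only under the
   cases-injectivity hypothesis on the continuation; this is what keeps the
   combined parser injective. *)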
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
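(* [parse_synth] below re-interprets parsed values through a function [f2];
   injectivity of [f2] is required so that the resulting parser remains
   injective. *)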
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
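(* A minimal usage sketch, not part of the original interface (the name
   [parse_synth_example] is ours): re-interpreting the result of a parser
   through an injective function; the injectivity side condition is expected to
   be discharged automatically here. *)
let parse_synth_example : parser parse_ret_kind int
= parse_synth (parse_ret 41) (fun (x: int) -> x + 1)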
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
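(* [synth_inverse f2 g1] states that [g1] is a right inverse of [f2]; together
   with [synth_injective f2] it is what [serialize_synth] below needs in order
   to serialize through [g1]. *)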
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
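(* [parse_tagged_union pt tag_of_data p] first parses a tag with [pt], then
   parses the payload with the tag-dependent parser [p tg], returning data
   whose [tag_of_data] is the parsed tag. *)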
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
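(* The serializer for tagged unions writes the serialized tag followed by the
   serialized payload; [bare_serialize_tagged_union_correct] below shows that
   this is correct provided the tag parser is [ParserStrong]. *)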
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
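(* Dependent pairs are the special case of tagged unions where the tag is the
   first component ([dfst]) and the payload parser may depend on it. *)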
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
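(* A minimal usage sketch, not part of the original interface (the name
   [nondep_then_example] is ours): sequencing two parsers into a pair. *)
let nondep_then_example : parser (and_then_kind parse_ret_kind parse_ret_kind) (int * bool)
= parse_ret 0 `nondep_then` parse_ret true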
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
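(* The [serialize_nondep_then_upd_*] lemmas below state that updating one
   component of a serialized pair amounts to an in-place byte-level update of
   the output ([seq_upd_seq]); the [_bw_] variants index from the end of the
   buffer. *)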
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
()
let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf
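(* Serializing a strengthened value simply reuses the underlying serializer:
   the refinement [p2] does not change the wire format. *)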
let serialize_strengthen'
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: GTot bytes
= serialize s input
let serialize_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
(input: t1 { p2 input } )
: Lemma
(let output = serialize_strengthen' p2 prf s input in
parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output))
= ()
let serialize_strengthen
(#k: parser_kind)
(#t1: Type)
(#p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
(s: serializer p1)
: Tot (serializer (parse_strengthen p1 p2 prf))
= Classical.forall_intro (serialize_strengthen_correct p2 prf s);
serialize_strengthen' p2 prf s
let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 =
let y1 = f1 x in
f2 y1
val make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
(** Tot vs. Ghost *)
unfold
let lift_parser'
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (bare_parser t)
= fun (input: bytes) -> parse (f ()) input
let lift_parser_correct
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Lemma
(parser_kind_prop k (lift_parser' f))
= parser_kind_prop_ext k (f ()) (lift_parser' f)
let lift_parser
(#k: parser_kind)
(#t: Type)
(f: unit -> GTot (parser k t))
: Tot (parser k t)
= lift_parser_correct f;
lift_parser' f
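(* Similarly, a serializer given only as a ghost thunk can be lifted to a
   serializer for [lift_parser f]. *)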
unfold
let lift_serializer'
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Tot (bare_serializer t)
= fun (x: t) -> serialize (s ()) x
let lift_serializer_correct
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Lemma
(serializer_correct (lift_parser f) (lift_serializer' s))
= ()
let lift_serializer
(#k: parser_kind)
(#t: Type)
(#f: unit -> GTot (parser k t))
(s: unit -> GTot (serializer (f ())))
: Tot (serializer #k #t (lift_parser f))
= lift_serializer_correct #k #t #f s;
lift_serializer' #k #t #f s
(** Refinements *)
// unfold
inline_for_extraction
let parse_filter_kind (k: parser_kind) : Tot parser_kind =
{
parser_kind_low = k.parser_kind_low;
parser_kind_high = k.parser_kind_high;
parser_kind_metadata =
begin match k.parser_kind_metadata with
| Some ParserKindMetadataFail -> Some ParserKindMetadataFail
| _ -> None
end;
parser_kind_subkind = k.parser_kind_subkind;
}
// unfold
let parse_filter_payload_kind : parser_kind =
strong_parser_kind 0 0 None
let parse_filter_refine (#t: Type) (f: (t -> GTot bool)) =
(x: t { f x == true } )
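(* [parse_filter_payload f v] consumes no further input: it succeeds
   (returning [v] with its refinement) if [f v] holds, and fails otherwise. *)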
let parse_filter_payload
(#t: Type)
(f: (t -> GTot bool))
(v: t)
: Tot (parser parse_filter_payload_kind (parse_filter_refine f))
= let p = lift_parser (fun () ->
if f v
then
let v' : (x: t { f x == true } ) = v in
weaken parse_filter_payload_kind (parse_ret v')
else fail_parser parse_filter_payload_kind (parse_filter_refine f)
)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p
val parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
val parse_filter_eq
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
(input: bytes)
: Lemma
(parse (parse_filter p f) input == (match parse p input with
| None -> None
| Some (x, consumed) ->
if f x
then Some (x, consumed)
else None
))
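(* A minimal usage sketch, not part of the original interface (the name
   [parse_filter_example] is ours): restricting a parser's results with a
   boolean predicate. *)
let parse_filter_example
= parse_filter (parse_ret 42) (fun (x: int) -> x >= 0)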
val tot_parse_filter
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
: Pure (tot_parser (parse_filter_kind k) (parse_filter_refine f))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_filter #k p f) x
))
let tot_parse_filter_eq
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
(input: bytes)
: Lemma
(parse (tot_parse_filter p f) input == (match parse p input with
| None -> None
| Some (x, consumed) ->
if f x
then Some (x, consumed)
else None
))
= parse_filter_eq #k p f input | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: LowParse.Spec.Base.serializer p -> f: (_: t -> Prims.GTot Prims.bool)
-> LowParse.Spec.Base.bare_serializer (x: t{f x == true}) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.bool",
"Prims.eq2",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.bare_serializer"
] | [] | false | false | false | false | false | let serialize_filter'
(#k: parser_kind)
(#t: Type)
(#p: parser k t)
(s: serializer p)
(f: (t -> GTot bool))
: Tot (bare_serializer (x: t{f x == true})) =
| fun (input: t{f input == true}) -> s input | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.parse_strengthen | val parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1{p2 x})) | val parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1{p2 x})) | let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1 { p2 x } ))
= bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 33,
"end_line": 1681,
"start_col": 0,
"start_line": 1673
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
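(* [make_constant_size_parser sz t f] turns a decoding function on exactly
   [sz] bytes into a parser of constant size [sz]; its precondition requires
   [f] to be injective on the inputs it accepts. *)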
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
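(* [parse_ret v] and [parse_empty] succeed on every input and consume zero
   bytes; their kind records an exact size of 0 and totality. *)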
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
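(* The bind alone is not necessarily injective: [and_then_cases_injective]
   below additionally requires that equal result values can only come from
   equal intermediate values, which is what makes [and_then] injective. *)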
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
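(* Illustrative sketch (the name parse_dtuple2_const_example is ad hoc, not part
   of the LowParse interface): when the second component's type does not actually
   depend on the first value, parse_dtuple2 degenerates to plain sequencing of
   the two parsers. *)
let parse_dtuple2_const_example
  (#k1: parser_kind)
  (#t1: Type)
  (p1: parser k1 t1)
  (#k2: parser_kind)
  (#t2: Type)
  (p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 (fun _ -> t2)))
= parse_dtuple2 p1 (fun _ -> p2)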
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
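(* Illustrative sketch (ad-hoc name, not part of the LowParse interface): any
   parser can be sequenced with itself; the resulting kind adds the size bounds
   of both sides. *)
let nondep_then_self_example
  (#k: parser_kind)
  (#t: Type)
  (p: parser k t)
: Tot (parser (and_then_kind k k) (t * t))
= nondep_then p p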
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
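(* Illustrative sketch (ad-hoc name, not part of the LowParse interface):
   serializing a pair appends the serializations of its components (cf.
   serialize_nondep_then_eq below); the left-hand kind is required to be strong. *)
let serialize_nondep_then_example
  (#k1: parser_kind)
  (#t1: Type)
  (#p1: parser k1 t1)
  (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
  (#k2: parser_kind)
  (#t2: Type)
  (#p2: parser k2 t2)
  (s2: serializer p2)
  (x: t1 * t2)
: GTot bytes
= serialize (serialize_nondep_then s1 s2) x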
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
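(* Illustrative sketch (ad-hoc name, not part of the LowParse interface): if the
   strengthening predicate holds unconditionally, the proof obligation is
   discharged without inspecting the parse result. *)
let bare_parse_strengthen_unconditional_example
  (#k: parser_kind)
  (#t1: Type)
  (p1: parser k t1)
  (p2: t1 -> GTot Type0)
  (prf_all: (x: t1) -> Lemma (p2 x))
: Tot (bare_parser (x: t1 { p2 x } ))
= bare_parse_strengthen p1 p2 (fun xbytes consumed x -> prf_all x)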
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2)
let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2)
let bare_parse_strengthen_correct
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf) /\
parser_kind_prop k (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
bare_parse_strengthen_no_lookahead p1 p2 prf;
bare_parse_strengthen_injective p1 p2 prf;
parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf);
() | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
p2: (_: t1 -> Prims.GTot Type0) ->
prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2
-> LowParse.Spec.Base.parser k (x: t1{p2 x}) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_strengthen_prf",
"LowParse.Spec.Combinators.bare_parse_strengthen",
"Prims.unit",
"LowParse.Spec.Combinators.bare_parse_strengthen_correct"
] | [] | false | false | false | false | false | let parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Tot (parser k (x: t1{p2 x})) =
| bare_parse_strengthen_correct p1 p2 prf;
bare_parse_strengthen p1 p2 prf | false |
LowParse.Spec.Combinators.fsti | LowParse.Spec.Combinators.bare_parse_strengthen_injective | val bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma (injective (bare_parse_strengthen p1 p2 prf)) | val bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma (injective (bare_parse_strengthen p1 p2 prf)) | let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(injective (bare_parse_strengthen p1 p2 prf))
= parser_kind_prop_equiv k p1;
let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 94,
"end_line": 1656,
"start_col": 0,
"start_line": 1645
} | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
(** Constant-size parsers *)
let make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Tot (bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let make_constant_size_parser_precond_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
(s1: bytes { Seq.length s1 == sz } )
(s2: bytes { Seq.length s2 == sz } )
: GTot Type0
= (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
let make_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
let make_constant_size_parser_precond'
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2
let make_constant_size_parser_injective
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Lemma
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (
injective (make_constant_size_parser_aux sz t f)
))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
let prf1
(b1 b2: bytes)
: Lemma
(requires (injective_precond p b1 b2))
(ensures (injective_postcond p b1 b2))
= assert (Some? (parse p b1));
assert (Some? (parse p b2));
let (Some (v1, len1)) = parse p b1 in
let (Some (v2, len2)) = parse p b2 in
assert ((len1 <: nat) == (len2 <: nat));
assert ((len1 <: nat) == sz);
assert ((len2 <: nat) == sz);
assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2));
assert (make_constant_size_parser_precond' sz t f)
in
Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1))
let constant_size_parser_kind
(sz: nat)
: Tot parser_kind
= strong_parser_kind sz sz None
let make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
: Pure (
parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let tot_make_constant_size_parser_aux
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Tot (tot_bare_parser t)
= fun (s: bytes) ->
if Seq.length s < sz
then None
else begin
let s' : bytes = Seq.slice s 0 sz in
match f s' with
| None -> None
| Some v ->
let (sz: consumed_length s) = sz in
Some (v, sz)
end
let tot_make_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
: Pure (
tot_parser
(constant_size_parser_kind sz)
t
)
(requires (
make_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in
make_constant_size_parser_injective sz t f;
parser_kind_prop_equiv (constant_size_parser_kind sz) p;
p
let make_total_constant_size_parser_precond
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: GTot Type0
= forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)}
f s1 == f s2 ==> Seq.equal s1 s2
let make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> GTot t))
: Pure (
parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
let tot_make_total_constant_size_parser
(sz: nat)
(t: Type)
(f: ((s: bytes {Seq.length s == sz}) -> Tot t))
: Pure (
tot_parser
(total_constant_size_parser_kind sz)
t
)
(requires (
make_total_constant_size_parser_precond sz t f
))
(ensures (fun _ -> True))
= let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in
parser_kind_prop_equiv (total_constant_size_parser_kind sz) p;
p
(** Combinators *)
/// monadic return for the parser monad
unfold
let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) =
fun (b: bytes) -> Some (v, (0 <: consumed_length b))
// unfold
inline_for_extraction
let parse_ret_kind : parser_kind =
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) =
parser_kind_prop_equiv parse_ret_kind (parse_ret' v);
parse_ret' v
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) =
tot_parse_ret v
let serialize_ret
(#t: Type)
(v: t)
(v_unique: (v' : t) -> Lemma (v == v'))
: Tot (serializer (parse_ret v))
= mk_serializer
(parse_ret v)
(fun (x: t) -> Seq.empty)
(fun x -> v_unique x)
let parse_empty : parser parse_ret_kind unit =
parse_ret ()
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
#set-options "--z3rlimit 16"
let fail_parser_kind_precond
(k: parser_kind)
: GTot Type0
= k.parser_kind_metadata <> Some ParserKindMetadataTotal /\
(Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high)
let fail_parser'
(t: Type)
: Tot (tot_bare_parser t)
= fun _ -> None
let tot_fail_parser
(k: parser_kind)
(t: Type)
: Pure (tot_parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= let p = fail_parser' t in
parser_kind_prop_equiv k p;
tot_strengthen k p
let fail_parser
(k: parser_kind)
(t: Type)
: Pure (parser k t)
(requires (fail_parser_kind_precond k))
(ensures (fun _ -> True))
= tot_fail_parser k t
let fail_serializer
(k: parser_kind {fail_parser_kind_precond k} )
(t: Type)
(prf: (x: t) -> Lemma False)
: Tot (serializer (fail_parser k t))
= mk_serializer
(fail_parser k t)
(fun x -> prf x; false_elim ())
(fun x -> prf x)
inline_for_extraction
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false : parser parse_false_kind (squash False) = fail_parser _ _
let serialize_false : serializer parse_false = fun input -> false_elim ()
/// monadic bind for the parser monad
let and_then_bare (#t:Type) (#t':Type)
(p:bare_parser t)
(p': (t -> Tot (bare_parser t'))) :
Tot (bare_parser t') =
fun (b: bytes) ->
match parse p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match parse p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
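(* Illustrative sketch (ad-hoc name, not part of the LowParse interface): ignoring
   the first result recovers plain sequencing that keeps only the second value. *)
let and_then_bare_snd_example
  (#t: Type)
  (#t': Type)
  (p: bare_parser t)
  (p': bare_parser t')
: Tot (bare_parser t')
= and_then_bare p (fun _ -> p')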
let and_then_cases_injective_precond
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(x1 x2: t)
(b1 b2: bytes)
: GTot Type0
= Some? (parse (p' x1) b1) /\
Some? (parse (p' x2) b2) /\ (
let (Some (v1, _)) = parse (p' x1) b1 in
let (Some (v2, _)) = parse (p' x2) b2 in
v1 == v2
)
let and_then_cases_injective
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
: GTot Type0
= forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)}
and_then_cases_injective_precond p' x1 x2 b1 b2 ==>
x1 == x2
let and_then_cases_injective_intro
(#t:Type)
(#t':Type)
(p': (t -> Tot (bare_parser t')))
(lem: (
(x1: t) ->
(x2: t) ->
(b1: bytes) ->
(b2: bytes) ->
Lemma
(requires (and_then_cases_injective_precond p' x1 x2 b1 b2))
(ensures (x1 == x2))
))
: Lemma
(and_then_cases_injective p')
= Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
let and_then_injective
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
: Lemma
(requires (
injective p /\
(forall (x: t) . injective (p' x)) /\
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p')
))
= let ps = and_then_bare p p' in
let f
(b1 b2: bytes)
: Lemma
(requires (injective_precond ps b1 b2))
(ensures (injective_postcond ps b1 b2))
= let (Some (v1, len1)) = p b1 in
let (Some (v2, len2)) = p b2 in
let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in
let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in
assert (Some? ((p' v1) b1'));
assert (Some? ((p' v2) b2'));
assert (and_then_cases_injective_precond p' v1 v2 b1' b2');
assert (v1 == v2);
assert (injective_precond p b1 b2);
assert ((len1 <: nat) == (len2 <: nat));
assert (injective (p' v1));
assert (injective_precond (p' v1) b1' b2');
assert (injective_postcond (p' v1) b1' b2');
let (Some (_, len1')) = (p' v1) b1' in
let (Some (_, len2')) = (p' v2) b2' in
assert ((len1' <: nat) == (len2' <: nat));
Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1;
Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1;
assert (injective_postcond ps b1 b2)
in
Classical.forall_intro_2 (fun x -> Classical.move_requires (f x))
let and_then_no_lookahead_on
(#t:Type)
(#t':Type)
(p: bare_parser t)
(p': (t -> Tot (bare_parser t')))
(x: bytes)
(x' : bytes)
: Lemma
(requires (
no_lookahead p /\
injective p /\
(forall (x: t) . no_lookahead (p' x))
))
(ensures (no_lookahead_on (and_then_bare p p') x x'))
=
let f = and_then_bare p p' in
match f x with
| Some v ->
let (y, off) = v in
let off : nat = off in
let (off_x : consumed_length x ) = off in
if off <= Seq.length x'
then
let (off_x' : consumed_length x') = off in
let g () : Lemma
(requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x))
(ensures (
Some? (f x') /\ (
let (Some v') = f x' in
let (y', off') = v' in
y == y'
)))
= assert (Some? (p x));
let (Some (y1, off1)) = p x in
assert (off1 <= off);
assert (off1 <= Seq.length x');
assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1);
assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1);
assert (no_lookahead_on p x x');
assert (Some? (p x'));
let (Some v1') = p x' in
let (y1', off1') = v1' in
assert (y1 == y1');
assert (injective_precond p x x');
assert ((off1 <: nat) == (off1' <: nat));
let x2 : bytes = Seq.slice x off1 (Seq.length x) in
let x2' : bytes = Seq.slice x' off1 (Seq.length x') in
let p2 = p' y1 in
assert (Some? (p2 x2));
let (Some (y2, off2)) = p2 x2 in
assert (off == off1 + off2);
assert (off2 <= Seq.length x2);
assert (off2 <= Seq.length x2');
assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2));
assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2);
assert (no_lookahead_on p2 x2 x2');
assert (Some? (p2 x2'));
let (Some v2') = p2 x2' in
let (y2', _) = v2' in
assert (y2 == y2')
in
Classical.move_requires g ()
else ()
| _ -> ()
inline_for_extraction
let and_then_metadata
(k1 k2: parser_kind_metadata_t)
: Tot parser_kind_metadata_t
= match k1, k2 with
| Some ParserKindMetadataFail, _ -> k1
| _, Some ParserKindMetadataFail -> k2
| Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1
| _ -> None
// unfold
inline_for_extraction
let and_then_kind
(k1 k2: parser_kind)
: Tot parser_kind
= {
parser_kind_low = k1.parser_kind_low + k2.parser_kind_low;
parser_kind_high =
begin
if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high
then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high)
else None
end;
parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata;
parser_kind_subkind =
begin
if k2.parser_kind_subkind = Some ParserConsumesAll
then Some ParserConsumesAll
else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then Some ParserStrong
else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong)
then k1.parser_kind_subkind
else None
end;
}
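(* Illustrative sketch (ad-hoc name, not part of the LowParse interface): the
   combined kind adds the lower bounds of the two input kinds. *)
let and_then_kind_low_example
  (k1 k2: parser_kind)
: Lemma ((and_then_kind k1 k2).parser_kind_low == k1.parser_kind_low + k2.parser_kind_low)
= ()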
let and_then_no_lookahead
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x));
  if k.parser_kind_subkind = Some ParserStrong && k'.parser_kind_subkind = Some ParserStrong then
Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
else ()
#set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64"
let and_then_correct
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Lemma
(requires (
and_then_cases_injective p'
))
(ensures (
injective (and_then_bare p p') /\
parser_kind_prop (and_then_kind k k') (and_then_bare p p')
))
= parser_kind_prop_equiv k p;
Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x));
parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p');
and_then_injective p p';
and_then_no_lookahead p p'
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
val and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
/// monadic return for the parser monad, composed with a pure function f
unfold
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in
parse_fret' f v
let synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: GTot Type0
= forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
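(* Illustrative sketch (the names synth_id_example and synth_id_example_injective
   are ad hoc, not part of the LowParse interface): the identity function is
   trivially synth_injective. *)
let synth_id_example (t: Type) (x: t) : Tot t = x

let synth_id_example_injective (t: Type) : Lemma (synth_injective (synth_id_example t)) = ()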
let synth_injective_intro
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
: Lemma
(requires (forall (x x' : t1) . f x == f x' ==> x == x'))
(ensures (synth_injective f))
= ()
let synth_injective_intro'
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(prf: (
(x: t1) ->
(x' : t1) ->
Lemma
(requires (f x == f x'))
(ensures (x == x'))
))
: Lemma
(synth_injective f)
= Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
let parse_synth'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Tot (bare_parser t2)
= fun b -> match parse p1 b with
| None -> None
| Some (x1, consumed) -> Some (f2 x1, consumed)
val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
val parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
let parse_synth_eq2
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(sq: squash (synth_injective f2))
(b: bytes)
: Lemma
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= parse_synth_eq p1 f2 b
val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b))
= parse_synth_eq #k p1 f2 b
let bare_serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Tot (bare_serializer t2) =
fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth_correct
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
: Lemma
(requires (
(forall (x : t2) . f2 (g1 x) == x) /\
(forall (x x' : t1) . f2 x == f2 x' ==> x == x')
))
(ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 )))
let synth_inverse
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: GTot Type0
= (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x)
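(* Illustrative sketch (ad-hoc name, not part of the LowParse interface): the
   identity function from the synth_injective example above is its own inverse. *)
let synth_id_example_inverse (t: Type) : Lemma (synth_inverse (synth_id_example t) (synth_id_example t)) = ()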
let synth_inverse_intro
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
: Lemma
(requires (forall (x : t2) . f2 (g1 x) == x))
(ensures (synth_inverse f2 g1))
= ()
let synth_inverse_intro'
(#t1: Type)
(#t2: Type)
(f2: (t1 -> GTot t2))
(g1: (t2 -> GTot t1))
(prf: (x: t2) -> Lemma (f2 (g1 x) == x))
: Lemma
(ensures (synth_inverse f2 g1))
= Classical.forall_intro prf
let synth_inverse_synth_injective_pat
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
[SMTPat (synth_inverse g f)]
= assert (forall x1 x2. f x1 == f x2 ==> g (f x1) == g (f x2))
let synth_inverse_synth_injective
(#t1: Type)
(#t2: Type)
(f: (t1 -> GTot t2))
(g: (t2 -> GTot t1))
: Lemma
(requires (synth_inverse g f))
(ensures (synth_injective f))
= ()
let synth_inverse_synth_injective'
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f))
: Tot (squash (synth_injective f))
= ()
let synth_injective_synth_inverse_synth_inverse_recip
(#t1: Type)
(#t2: Type)
(g: (t2 -> GTot t1))
(f: (t1 -> GTot t2))
(u: squash (synth_inverse g f /\ synth_injective g))
: Tot (squash (synth_inverse f g))
= assert (forall x . g (f (g x)) == g x)
val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
val serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
let serialize_synth_eq'
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
(y1: bytes)
(q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x))
(y2: bytes)
(q2: squash (y2 == serialize s1 (g1 x)))
: Lemma
(ensures (y1 == y2))
= serialize_synth_eq p1 f2 s1 g1 u x
let serialize_tot_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer #k (tot_parse_synth p1 f2))
= serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _
val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
(* Strengthened versions of and_then *)
inline_for_extraction
let synth_tagged_union_data
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(tg: tag_t)
(x: refine_with_tag tag_of_data tg)
: Tot data_t
= x
let parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Tot (parser k data_t)
= parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
let parse_tagged_union_payload_and_then_cases_injective
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Lemma
(and_then_cases_injective (parse_tagged_union_payload tag_of_data p))
= and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 ->
parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1;
parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2
)
val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let bare_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(k': (t: tag_t) -> Tot parser_kind)
(p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(input: bytes)
: GTot (option (data_t * consumed_length input))
= match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let tot_parse_tagged_union_payload
(#tag_t: Type)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(tg: tag_t)
: Pure (tot_parser k data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x
))
= tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg)
val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_eq #kt pt tag_of_data #k p input
let bare_serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Tot (bare_serializer data_t)
= fun (d: data_t) ->
let tg = tag_of_data d in
Seq.append (st tg) (serialize (s tg) d)
let seq_slice_append_l
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1)
let seq_slice_append_r
(#t: Type)
(s1 s2: Seq.seq t)
: Lemma
(Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2)
= assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2)
let bare_serialize_tagged_union_correct
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serializer_correct (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s)))
= (* same proof as nondep_then *)
let prf
(x: data_t)
: Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x)))
= parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x);
let t = tag_of_data x in
let (u: refine_with_tag tag_of_data t) = x in
let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in
let v1 = parse pt (serialize st t) in
assert (Some? v1);
parser_kind_prop_equiv kt pt;
assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (_, len')) = parse pt (serialize st t) in
assert (len' == Seq.length (serialize st t));
assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x));
assert (Seq.slice (serialize st t) 0 len' == st t);
seq_slice_append_l (serialize st t) (serialize (s t) u);
assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (Some? v1');
assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x));
let (Some (x1, len1)) = v1 in
let (Some (x1', len1')) = v1' in
assert (x1 == x1');
assert ((len1 <: nat) == (len1' <: nat));
assert (x1 == t);
assert (len1 == Seq.length (serialize st t));
assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u));
seq_slice_append_r (serialize st t) (serialize (s t) u);
()
in
Classical.forall_intro prf
val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
val serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
let serialize_tot_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: tot_parser kt tag_t)
(st: serializer #kt pt)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer #k (p t)))
: Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= serialize_ext _
(serialize_tagged_union st tag_of_data s)
_
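(* Illustrative sketch, not part of LowParse: a tagged union is typically assembled
   from a strong tag parser/serializer and a tag-indexed payload family. The names
   p_tag, s_tag, tag_of, p_payload and s_payload below are hypothetical:

     let p_sum = parse_tagged_union p_tag tag_of p_payload
     let s_sum = serialize_tagged_union s_tag tag_of s_payload

   Serialization writes the tag first and then the payload selected by that tag,
   as spelled out by bare_serialize_tagged_union above. *)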
(* Dependent pairs *)
inline_for_extraction
let synth_dtuple2
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: t2 x)
: Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
= (| x, y |)
let parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2))
= parse_tagged_union
p1
dfst
(fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x))
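(* Illustrative sketch, not part of LowParse: parse_dtuple2 first parses a value x
   with p1, then parses the remaining input with the parser p2 x chosen from x,
   e.g. a header followed by a payload whose type depends on the header. With
   hypothetical p_hdr and p_body:

     let p_msg = parse_dtuple2 p_hdr (fun (h: hdr_t) -> p_body h)

   producing values of type dtuple2 hdr_t body_t, i.e. dependent pairs (| h, b |). *)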
inline_for_extraction
let synth_dtuple2_recip
(#t1: Type)
(#t2: t1 -> Type)
(x: t1)
(y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x)
: Tot (t2 x)
= dsnd y
val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
val parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
let bare_parse_dtuple2
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
: Tot (bare_parser (dtuple2 t1 t2))
= fun b ->
match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let parse_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b)
= parse_dtuple2_eq p1 p2 b
val serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
let bare_serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: GTot bytes
= serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
let serialize_dtuple2_eq'
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Tot (squash (
(serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy)))
= serialize_dtuple2_eq s1 s2 xy
(* Special case for non-dependent parsing *)
val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
#set-options "--z3rlimit 16"
val nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let bare_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(s1: serializer p1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(s2: serializer p2)
: Tot (bare_serializer (t1 * t2))
= fun (x: t1 * t2) ->
let (x1, x2) = x in
Seq.append (s1 x1) (s2 x2)
val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
val serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
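(* Illustrative sketch with hypothetical names: for a strong serializer s_a of p_a
   and an arbitrary serializer s_b of p_b,

     let p_ab = p_a `nondep_then` p_b
     let s_ab = serialize_nondep_then s_a s_b

   and, by serialize_nondep_then_eq, serialize s_ab (a, b) is the concatenation of
   serialize s_a a and serialize s_b b. *)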
val length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
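(* The serialize_nondep_then_upd lemmas below describe in-place updates of a
   serialized pair: overwriting the bytes of the left (resp. right) component with
   the serialization of a same-length replacement yields the serialization of the
   updated pair. The chain variants extend this to partial updates inside a
   component, and the bw variants count offsets backwards from the end of the
   output. *)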
val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
val serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactics -FStar.Reflection'"
val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
val serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
let serialize_nondep_then_upd_bw_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y)
))
= serialize_nondep_then_upd_right s1 s2 x y
let serialize_nondep_then_upd_bw_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_bw_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s'
))
= let s2' = serialize s2 (snd x) in
let j' = Seq.length s2' - i' - Seq.length s' in
assert (j' + Seq.length s' <= Seq.length s2');
assert (serialize s2 y == seq_upd_seq s2' j' s');
let s = serialize (serialize_nondep_then s1 s2) x in
serialize_nondep_then_upd_right_chain s1 s2 x y j' s';
assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s');
()
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactics -FStar.Reflection'"
(** Apply a total transformation on parsed data *)
let parse_strengthen_prf
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
: Tot Type
= (xbytes: bytes) ->
(consumed: consumed_length xbytes) ->
(x: t1) ->
Lemma
(requires (parse p1 xbytes == Some (x, consumed)))
(ensures (p2 x))
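(* Illustrative sketch, not part of LowParse: a parse_strengthen_prf value proves
   that every successful parse of p1 already satisfies the refinement p2. For the
   trivial refinement it is immediate, e.g.

     let prf_trivial (#k: parser_kind) (#t1: Type) (p1: parser k t1)
       : parse_strengthen_prf p1 (fun _ -> True)
       = fun _ _ _ -> ()

   bare_parse_strengthen below then uses the proof to re-type the parsed value
   at (x: t1 { p2 x } ). *)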
let bare_parse_strengthen
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Tot (bare_parser (x: t1 { p2 x } ))
= fun (xbytes: bytes) ->
match parse p1 xbytes with
| Some (x, consumed) ->
prf xbytes consumed x;
let (x' : t1 { p2 x' } ) = x in
Some (x', consumed)
| _ -> None
let bare_parse_strengthen_no_lookahead
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: t1 -> GTot Type0)
(prf: parse_strengthen_prf p1 p2)
: Lemma
(no_lookahead p1 ==> no_lookahead (bare_parse_strengthen p1 p2 prf))
= let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.Combinators.fsti"
} | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p1: LowParse.Spec.Base.parser k t1 ->
p2: (_: t1 -> Prims.GTot Type0) ->
prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.injective (LowParse.Spec.Combinators.bare_parse_strengthen p1 p2 prf)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_strengthen_prf",
"Prims._assert",
"Prims.l_Forall",
"LowParse.Bytes.bytes",
"Prims.l_imp",
"LowParse.Spec.Base.injective_postcond",
"Prims.unit",
"LowParse.Spec.Base.injective_precond",
"LowParse.Spec.Base.bare_parser",
"LowParse.Spec.Combinators.bare_parse_strengthen",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"Prims.l_True",
"Prims.squash",
"LowParse.Spec.Base.injective",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let bare_parse_strengthen_injective
(#k: parser_kind)
(#t1: Type)
(p1: parser k t1)
(p2: (t1 -> GTot Type0))
(prf: parse_strengthen_prf p1 p2)
: Lemma (injective (bare_parse_strengthen p1 p2 prf)) =
| parser_kind_prop_equiv k p1;
let p':bare_parser (x: t1{p2 x}) = bare_parse_strengthen p1 p2 prf in
assert (forall (b1: bytes) (b2: bytes). injective_precond p' b1 b2 ==> injective_precond p1 b1 b2);
assert (forall (b1: bytes) (b2: bytes). injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) | false |