effect
stringclasses
48 values
original_source_type
stringlengths
0
23k
opens_and_abbrevs
listlengths
2
92
isa_cross_project_example
bool
1 class
source_definition
stringlengths
9
57.9k
partial_definition
stringlengths
7
23.3k
is_div
bool
2 classes
is_type
null
is_proof
bool
2 classes
completed_definiton
stringlengths
1
250k
dependencies
dict
effect_flags
sequencelengths
0
2
ideal_premises
sequencelengths
0
236
mutual_with
sequencelengths
0
11
file_context
stringlengths
0
407k
interleaved
bool
1 class
is_simply_typed
bool
2 classes
file_name
stringlengths
5
48
vconfig
dict
is_simple_lemma
null
source_type
stringlengths
10
23k
proof_features
sequencelengths
0
1
name
stringlengths
8
95
source
dict
verbose_type
stringlengths
1
7.42k
source_range
dict
FStar.Tactics.Effect.Tac
val filter_goals (l: list goal) : Tac (list goal * list goal)
[ { "abbrev": false, "full_module": "FStar.Reflection.V2.Derived.Lemmas", "short_module": null }, { "abbrev": true, "full_module": "FStar.Algebra.CommMonoid.Equiv", "short_module": "CE" }, { "abbrev": false, "full_module": "FStar.Tactics.CanonCommMonoidSimple.Equiv", "short_module": null }, { "abbrev": false, "full_module": "FStar.Tactics.V2", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "FStar.Ghost", "short_module": null }, { "abbrev": true, "full_module": "FStar.FunctionalExtensionality", "short_module": "FExt" }, { "abbrev": true, "full_module": "Steel.Memory", "short_module": "Mem" }, { "abbrev": false, "full_module": "Steel.Memory", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let rec filter_goals (l:list goal) : Tac (list goal * list goal) = match l with | [] -> [], [] | hd::tl -> let slgoals, loggoals = filter_goals tl in match term_as_formula' (goal_type hd) with | Comp (Eq t) _ _ -> if Some? t then let b = typ_contains_req_ens (Some?.v t) in if b then ( slgoals, hd::loggoals ) else ( hd::slgoals, loggoals ) else ( hd::slgoals, loggoals ) | App t _ -> if is_fvar t (`%squash) then hd::slgoals, loggoals else slgoals, loggoals | _ -> slgoals, loggoals
val filter_goals (l: list goal) : Tac (list goal * list goal) let rec filter_goals (l: list goal) : Tac (list goal * list goal) =
true
null
false
match l with | [] -> [], [] | hd :: tl -> let slgoals, loggoals = filter_goals tl in match term_as_formula' (goal_type hd) with | Comp (Eq t) _ _ -> if Some? t then let b = typ_contains_req_ens (Some?.v t) in if b then (slgoals, hd :: loggoals) else (hd :: slgoals, loggoals) else (hd :: slgoals, loggoals) | App t _ -> if is_fvar t (`%squash) then hd :: slgoals, loggoals else slgoals, loggoals | _ -> slgoals, loggoals
{ "checked_file": "Steel.Effect.Common.fsti.checked", "dependencies": [ "Steel.Memory.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Tactics.CanonCommMonoidSimple.Equiv.fst.checked", "FStar.String.fsti.checked", "FStar.Squash.fsti.checked", "FStar.Set.fsti.checked", "FStar.Reflection.V2.Derived.Lemmas.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.Base.fst.checked", "FStar.List.Tot.fst.checked", "FStar.Ghost.fsti.checked", "FStar.FunctionalExtensionality.fsti.checked", "FStar.Classical.fsti.checked", "FStar.Algebra.CommMonoid.Equiv.fst.checked" ], "interface_file": false, "source_file": "Steel.Effect.Common.fsti" }
[]
[ "Prims.list", "FStar.Tactics.Types.goal", "FStar.Pervasives.Native.Mktuple2", "Prims.Nil", "FStar.Pervasives.Native.tuple2", "FStar.Pervasives.Native.option", "FStar.Reflection.Types.typ", "FStar.Tactics.NamedView.term", "FStar.Pervasives.Native.uu___is_Some", "Prims.Cons", "Prims.bool", "Steel.Effect.Common.typ_contains_req_ens", "FStar.Pervasives.Native.__proj__Some__item__v", "FStar.Reflection.V2.Derived.is_fvar", "FStar.Reflection.V2.Formula.formula", "FStar.Reflection.V2.Formula.term_as_formula'", "FStar.Tactics.Types.goal_type", "Steel.Effect.Common.filter_goals" ]
[]
(* Copyright 2020 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Steel.Effect.Common open Steel.Memory module Mem = Steel.Memory module FExt = FStar.FunctionalExtensionality open FStar.Ghost /// This module provides various predicates and functions which are common to the /// different Steel effects. /// It also contains the tactic responsible for frame inference through a variant of AC-unification #set-options "--ide_id_info_off" (* Normalization helpers *) irreducible let framing_implicit : unit = () irreducible let __steel_reduce__ : unit = () /// An internal attribute for finer-grained normalization in framing equalities irreducible let __inner_steel_reduce__ : unit = () irreducible let __reduce__ : unit = () irreducible let smt_fallback : unit = () irreducible let ite_attr : unit = () // Needed to avoid some logical vs prop issues during unification with no subtyping [@@__steel_reduce__] unfold let true_p : prop = True module T = FStar.Tactics.V2 let join_preserves_interp (hp:slprop) (m0:hmem hp) (m1:mem{disjoint m0 m1}) : Lemma (interp hp (join m0 m1)) [SMTPat (interp hp (join m0 m1))] = let open Steel.Memory in intro_emp m1; intro_star hp emp m0 m1; affine_star hp emp (join m0 m1) (* Definition of a selector for a given slprop *) /// A selector of type `a` for a separation logic predicate hp is a function /// from a memory where the predicate hp holds, which returns a value of type `a`. 
/// The effect GTot indicates that selectors are ghost functions, used for specification /// and proof purposes only let selector' (a:Type0) (hp:slprop) = hmem hp -> GTot a /// Self-framing property for selectors let sel_depends_only_on (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp) (m1:mem{disjoint m0 m1}). (interp_depends_only_on hp; ( sel m0 == sel (join m0 m1))) /// Additional property that selectors must satisfy, related to internals of /// the Steel memory model encoding let sel_depends_only_on_core (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp). sel m0 == sel (core_mem m0) /// Full definition of a selector, as a function which satisfies the two predicates above let selector (a:Type) (hp:slprop) : Type = sel:selector' a hp{sel_depends_only_on sel /\ sel_depends_only_on_core sel} /// The basis of our selector framework: Separation logic assertions enhanced with selectors /// Note that selectors are "optional", it is always possible to use a non-informative selector, /// such as fun _ -> () and to rely on the standard separation logic reasoning [@@ erasable] noeq type vprop' = { hp: slprop u#1; t:Type0; sel: selector t hp} (* Lifting the star operator to an inductive type makes normalization and implementing some later functions easier *) [@@__steel_reduce__; erasable] noeq type vprop = | VUnit : vprop' -> vprop | VStar: vprop -> vprop -> vprop (* A generic lift from slprop to vprop with a non-informative selector *) [@@ __steel_reduce__] let to_vprop' (p:slprop) = {hp = p; t = unit; sel = fun _ -> ()} [@@ __steel_reduce__] unfold let to_vprop (p:slprop) = VUnit (to_vprop' p) /// Normalization steps for norm below. 
/// All functions marked as `unfold`, or with the `__steel_reduce__` attribute will be reduced, /// as well as some functions internal to the selector framework unfold let normal_steps = [delta_attr [`%__steel_reduce__; `%__inner_steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify] /// The core normalization primitive used to simplify Verification Conditions before encoding /// them to an SMT solver. unfold let normal (#a:Type) (x:a) = norm normal_steps x /// An abbreviation for the VStar constructor, allowing to use it with infix notation [@@ __steel_reduce__; __reduce__] let star = VStar /// Extracting the underlying separation logic assertion from a vprop [@@ __steel_reduce__] let rec hp_of (p:vprop) = match p with | VUnit p -> p.hp | VStar p1 p2 -> hp_of p1 `Mem.star` hp_of p2 /// Extracting the selector type from a vprop [@@ __steel_reduce__] let rec t_of (p:vprop) = match p with | VUnit p -> p.t | VStar p1 p2 -> t_of p1 * t_of p2 /// Extracting the selector from a vprop [@@ __steel_reduce__] let rec sel_of (p:vprop) : GTot (selector (t_of p) (hp_of p)) = match p with | VUnit p -> fun h -> p.sel h | VStar p1 p2 -> let sel1 = sel_of p1 in let sel2 = sel_of p2 in fun h -> (sel1 h, sel2 h) /// Type abbreviations for separation logic pre- and postconditions of the Steel effects type pre_t = vprop type post_t (a:Type) = a -> vprop /// An annotation to indicate which separation logic predicates correspond to monadic computations /// These computations are handled in a specific manner in the framing tactic; they correspond to places where /// the context shrinks from all local variables in the computation to variables available at the toplevel let return_pre (p:vprop) : vprop = p noextract let hmem (p:vprop) = hmem (hp_of p) /// Abstract predicate for vprop implication. 
Currently implemented as an implication on the underlying slprop val can_be_split (p q:pre_t) : Type0 /// Exposing the implementation of `can_be_split` when needed for proof purposes val reveal_can_be_split (_:unit) : Lemma (forall p q. can_be_split p q == Mem.slimp (hp_of p) (hp_of q)) /// A targeted version of the above val can_be_split_interp (r r':vprop) (h:hmem r) : Lemma (requires can_be_split r r') (ensures interp (hp_of r') h) /// A dependent version of can_be_split, to be applied to dependent postconditions let can_be_split_forall (#a:Type) (p q:post_t a) = forall x. can_be_split (p x) (q x) /// A version of can_be_split which is indexed by a proposition, which can be used for equalities abduction let can_be_split_dep (p:prop) (t1 t2:pre_t) = p ==> can_be_split t1 t2 /// A dependent version of the above predicate let can_be_split_forall_dep (#a:Type) (p:a -> prop) (t1 t2:post_t a) = forall (x:a). p x ==> can_be_split (t1 x) (t2 x) (* Some lemmas about the can_be_split* predicates, to be used as rewriting rules for the abstract predicates *) val can_be_split_trans (p q r:vprop) : Lemma (requires p `can_be_split` q /\ q `can_be_split` r) (ensures p `can_be_split` r) let can_be_split_trans_rev (p q r:vprop) : Lemma (requires q `can_be_split` r /\ p `can_be_split` q) (ensures p `can_be_split` r) = can_be_split_trans p q r val can_be_split_star_l (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` p) [SMTPat ((p `star` q) `can_be_split` p)] val can_be_split_star_r (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` q) [SMTPat ((p `star` q) `can_be_split` q)] val can_be_split_refl (p:vprop) : Lemma (p `can_be_split` p) [SMTPat (p `can_be_split` p)] val can_be_split_congr_l (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((p `star` r) `can_be_split` (q `star` r))) val can_be_split_congr_r (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((r `star` p) `can_be_split` (r `star` q))) let prop_and (p1 p2: prop) : Tot prop = p1 
/\ p2 let can_be_split_forall_dep_trans_rev (#a: Type) (cond1 cond2: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond2 q r /\ can_be_split_forall_dep cond1 p q)) (ensures (can_be_split_forall_dep (fun x -> cond1 x `prop_and` cond2 x) p r)) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_trans x y) z) let can_be_split_forall_dep_congr_l (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> p x `star` r x) (fun x -> q x `star` r x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_l x y) z) let can_be_split_forall_dep_congr_r (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> r x `star` p x) (fun x -> r x `star` q x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_r x y) z) /// To simplify the implementation of the framing tactic, dependent equivalence /// is defined as a double dependent implication let equiv_forall (#a:Type) (t1 t2:post_t a) : Type0 = t1 `can_be_split_forall` t2 /\ t2 `can_be_split_forall` t1 /// This equivalence models a context restriction at the end of a Steel computation; /// note that t2 does not depend on the value of type `a`, but the two vprops must be /// equivalent let can_be_split_post (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) = forall (x:a). equiv_forall (t1 x) t2 /// Lifting the equivalence relation to vprops. Two vprops are equivalent if the underlying slprops /// are equivalent val equiv (p q:vprop) : prop /// Revealing the definition of vprop equivalence when needed for proof purposes. 
/// In other cases, the predicate is abstract val reveal_equiv (p q:vprop) : Lemma (p `equiv` q <==> hp_of p `Mem.equiv` hp_of q) (* A restricted view of the heap, that only allows to access selectors of the current slprop *) let rmem' (pre:vprop) = FExt.restricted_g_t (r0:vprop{can_be_split pre r0}) (fun r0 -> normal (t_of r0)) /// Ensuring that rmems encapsulate the structure induced by the separation logic star val valid_rmem (#frame:vprop) (h:rmem' frame) : prop unfold let rmem (pre:vprop) = h:rmem' pre{valid_rmem h} /// Exposing the definition of mk_rmem to better normalize Steel VCs unfold noextract let unrestricted_mk_rmem (r:vprop) (h:hmem r) = fun (r0:vprop{r `can_be_split` r0}) -> can_be_split_interp r r0 h; sel_of r0 h [@@ __inner_steel_reduce__] noextract let mk_rmem' (r:vprop) (h:hmem r) : Tot (rmem' r) = FExt.on_dom_g (r0:vprop{r `can_be_split` r0}) (unrestricted_mk_rmem r h) val lemma_valid_mk_rmem (r:vprop) (h:hmem r) : Lemma (valid_rmem (mk_rmem' r h)) [@@ __inner_steel_reduce__] noextract let mk_rmem (r:vprop) (h:hmem r) : Tot (rmem r) = lemma_valid_mk_rmem r h; mk_rmem' r h val reveal_mk_rmem (r:vprop) (h:hmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (ensures reveal_can_be_split(); (mk_rmem r h) r0 == sel_of r0 h) (* Logical pre and postconditions can only access the restricted view of the heap *) type req_t (pre:pre_t) = rmem pre -> Type0 type ens_t (pre:pre_t) (a:Type) (post:post_t a) = rmem pre -> (x:a) -> rmem (post x) -> Type0 (* Empty assertion *) val emp : vprop /// When needed for proof purposes, the empty assertion is a direct lift of the /// empty assertion from Steel.Memory val reveal_emp (_:unit) : Lemma (hp_of emp == Mem.emp /\ t_of emp == unit) /// Lifting pure predicates to vprop [@@__steel_reduce__] unfold let pure (p:prop) = to_vprop (pure p) /// Framing predicates for the Steel effect. 
If the current computation has already /// been framed, then the additional frame is the empty predicate let maybe_emp (framed:bool) (frame:pre_t) = if framed then frame == emp else True /// Dependent version of the above predicate, usable in dependent postconditions let maybe_emp_dep (#a:Type) (framed:bool) (frame:post_t a) = if framed then (forall x. frame x == emp) else True (* focus_rmem is an additional restriction of our view of memory. We expose it here to be able to reduce through normalization; Any valid application of focus_rmem h will be reduced to the application of h *) [@@ __steel_reduce__] unfold let unrestricted_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) = fun (r':vprop{can_be_split r0 r'}) -> can_be_split_trans r r0 r'; h r' [@@ __inner_steel_reduce__] let focus_rmem' (#r: vprop) (h: rmem r) (r0: vprop{r `can_be_split` r0}) : Tot (rmem' r0) = FExt.on_dom_g (r':vprop{can_be_split r0 r'}) (unrestricted_focus_rmem h r0) val lemma_valid_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (valid_rmem (focus_rmem' h r0)) [@@ __inner_steel_reduce__] let focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Tot (rmem r0) = lemma_valid_focus_rmem h r0; focus_rmem' h r0 /// Exposing that calling focus_rmem on the current context corresponds to an equality let focus_rmem_refl (r:vprop) (h:rmem r) : Lemma (focus_rmem #r h r == h) = FStar.FunctionalExtensionality.extensionality_g _ _ (focus_rmem #r h r) h open FStar.Tactics.V2 /// State that all "atomic" subresources have the same selectors on both views. 
/// The predicate has the __steel_reduce__ attribute, ensuring that VC normalization /// will reduce it to a conjunction of equalities on atomic subresources /// This predicate is also marked as `strict_on_arguments` on [frame], ensuring that /// it will not be reduced when the frame is symbolic /// Instead, the predicate will be rewritten to an equality using `lemma_frame_equalities` below [@@ __steel_reduce__; strict_on_arguments [0]] let rec frame_equalities' (frame:vprop) (h0:rmem frame) (h1:rmem frame) : Type0 = begin match frame with | VUnit p -> h0 frame == h1 frame | VStar p1 p2 -> can_be_split_star_l p1 p2; can_be_split_star_r p1 p2; let h01 = focus_rmem h0 p1 in let h11 = focus_rmem h1 p1 in let h02 = focus_rmem h0 p2 in let h12 = focus_rmem h1 p2 in frame_equalities' p1 h01 h11 /\ frame_equalities' p2 h02 h12 end /// This lemma states that frame_equalities is the same as an equality on the top-level frame. /// The uncommon formulation with an extra [p] is needed to use in `rewrite_with_tactic`, /// where the goal is of the shape `frame_equalities frame h0 h1 == ?u` /// The rewriting happens below, in `frame_vc_norm` val lemma_frame_equalities (frame:vprop) (h0:rmem frame) (h1:rmem frame) (p:Type0) : Lemma (requires (h0 frame == h1 frame) == p) (ensures frame_equalities' frame h0 h1 == p) /// A special case for frames about emp. val lemma_frame_emp (h0:rmem emp) (h1:rmem emp) (p:Type0) : Lemma (requires True == p) (ensures frame_equalities' emp h0 h1 == p) /// A variant of conjunction elimination, suitable to the equality goals during rewriting val elim_conjunction (p1 p1' p2 p2':Type0) : Lemma (requires p1 == p1' /\ p2 == p2') (ensures (p1 /\ p2) == (p1' /\ p2')) /// Normalization and rewriting step for generating frame equalities. /// The frame_equalities function has the strict_on_arguments attribute on the [frame], /// ensuring that it is not reduced when the frame is symbolic. 
/// When that happens, we want to replace frame_equalities by an equality on the frame, /// mimicking reduction [@@plugin] let frame_vc_norm () : Tac unit = with_compat_pre_core 0 (fun _ -> // Do not normalize mk_rmem/focus_rmem to simplify application of // the reflexivity lemma on frame_equalities' norm [delta_attr [`%__steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify]; // After reduction, the term to rewrite might be of the shape // (frame_equalities' ... /\ frame_equalities' .. /\ ...) == ?u, // with some frame_equalities' possibly already fully reduced // We repeatedly split the clause and extract the term on the left // to generate equalities on atomic subresources ignore (repeat (fun _ -> // Try to split the conjunction. If there is no conjunction, we exit the repeat apply_lemma (`elim_conjunction); // Dismiss the two uvars created for the RHS, they'll be solved by unification dismiss (); dismiss (); // The first goal is the left conjunction split (); // Removes the frame equality if it is about emp or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ()); // Rewrites the frame_equalities if it wasn't yet reduced or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ()); norm normal_steps; // Finally solve the uvar, finishing the rewriting for this clause trefl () )); // Removes the frame equality if it is about emp or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ()); // We do not have conjunctions anymore, we try to apply the frame_equalities rewriting // If it fails, the frame was not symbolic, so there is nothing to do or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ()); norm normal_steps; trefl ()) [@@ __steel_reduce__] unfold let frame_equalities (frame:vprop) 
(h0:rmem frame) (h1:rmem frame) : prop = rewrite_with_tactic frame_vc_norm (frame_equalities' frame h0 h1) /// More lemmas about the abstract can_be_split predicates, to be used as /// rewriting rules in the tactic below val can_be_split_dep_refl (p:vprop) : Lemma (can_be_split_dep true_p p p) val equiv_can_be_split (p1 p2:vprop) : Lemma (requires p1 `equiv` p2) (ensures p1 `can_be_split` p2) val intro_can_be_split_frame (p q:vprop) (frame:vprop) : Lemma (requires q `equiv` (p `star` frame)) (ensures can_be_split q p /\ True) val can_be_split_post_elim (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) : Lemma (requires (forall (x:a) (y:b). t1 x y `equiv` t2 y)) (ensures t1 `can_be_split_post` t2) val equiv_forall_refl (#a:Type) (t:post_t a) : Lemma (t `equiv_forall` t) val equiv_forall_elim (#a:Type) (t1 t2:post_t a) : Lemma (requires (forall (x:a). t1 x `equiv` t2 x)) (ensures t1 `equiv_forall` t2) open FStar.Tactics.CanonCommMonoidSimple.Equiv (* equiv is an equivalence relation on vprops *) /// Lemmas establishing the equivalence properties on equiv val equiv_refl (x:vprop) : Lemma (equiv x x) val equiv_sym (x y:vprop) : Lemma (requires equiv x y) (ensures equiv y x) val equiv_trans (x y z:vprop) : Lemma (requires equiv x y /\ equiv y z) (ensures equiv x z) module CE = FStar.Algebra.CommMonoid.Equiv /// Equiv is an equivalence relation for vprops elements inline_for_extraction noextract let req : CE.equiv vprop = CE.EQ equiv equiv_refl equiv_sym equiv_trans (* Star induces a commutative monoid for the equiv equivalence relation *) /// Lemmas establishing the commutative monoid properties val cm_identity (x:vprop) : Lemma ((emp `star` x) `equiv` x) val star_commutative (p1 p2:vprop) : Lemma ((p1 `star` p2) `equiv` (p2 `star` p1)) val star_associative (p1 p2 p3:vprop) : Lemma (((p1 `star` p2) `star` p3) `equiv` (p1 `star` (p2 `star` p3))) val star_congruence (p1 p2 p3 p4:vprop) : Lemma (requires p1 `equiv` p3 /\ p2 `equiv` p4) (ensures (p1 `star` p2) `equiv` (p3 
`star` p4)) /// Star induces a commutative monoid on vprops [@__steel_reduce__] inline_for_extraction noextract let rm : CE.cm vprop req = CE.CM emp star cm_identity star_associative star_commutative star_congruence (*** Vprop combinators ***) (* Refining a vprop with a selector predicate *) /// Separation logic predicate stating the validity of a vprop with an additional refinement on its selector val vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (slprop u#1) /// Exposing the validity of the above predicate when needed for proof purposes val interp_vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: mem) : Lemma (interp (vrefine_hp v p) m <==> (interp (hp_of v) m /\ p (sel_of v m))) /// Selector type for a refined vprop [@__steel_reduce__] let vrefine_t (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot Type = (x: t_of v {p x}) /// Selector of a refined vprop. Returns a value which satisfies the refinement predicate val vrefine_sel (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (selector (vrefine_t v p) (vrefine_hp v p)) /// Exposing the definition of the refined selector val vrefine_sel_eq (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: Mem.hmem (vrefine_hp v p)) : Lemma ( interp (hp_of v) m /\ vrefine_sel v p m == sel_of v m ) // [SMTPat ((vrefine_sel v p) m)] // FIXME: this pattern causes Z3 "wrong number of argument" errors /// Combining the above pieces to define a vprop refined by a selector prediacte [@__steel_reduce__] let vrefine' (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot vprop' = { hp = vrefine_hp v p; t = vrefine_t v p; sel = vrefine_sel v p; } [@__steel_reduce__] let vrefine (v: vprop) (p: (normal (t_of v) -> Tot prop)) = VUnit (vrefine' v p) (* Dependent star for vprops *) /// Separation logic predicate corresponding to a dependent star, /// where the second predicate depends on the selector value of the first val vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (slprop u#1) /// Exposing the validity 
of the above predicate when needed for proof purposes val interp_vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: mem) : Lemma (interp (vdep_hp v p) m <==> (interp (hp_of v) m /\ interp (hp_of v `Mem.star` hp_of (p (sel_of v m))) m)) /// Helper to define the selector type of the second component of the dependent star let vdep_payload (v: vprop) (p: ( (t_of v) -> Tot vprop)) (x: t_of v) : Tot Type = t_of (p x) /// Selector type for the dependent star: A dependent tuple, where the second component's type depends on the first vprop let vdep_t (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot Type = dtuple2 (t_of v) (vdep_payload v p) /// Selector for the dependent star val vdep_sel (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (selector (vdep_t v p) (vdep_hp v p)) /// Exposing the definition of the dependent star's selector when needed for proof purposes val vdep_sel_eq (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: Mem.hmem (vdep_hp v p)) : Lemma ( interp (hp_of v) m /\ begin let x = sel_of v m in interp (hp_of (p x)) m /\ vdep_sel v p m == (| x, sel_of (p x) m |) end ) /// Combining the elements above to create a dependent star vprop [@__steel_reduce__] let vdep' (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot vprop' = { hp = vdep_hp v p; t = vdep_t v p; sel = vdep_sel v p; } [@__steel_reduce__] let vdep (v: vprop) (p: ( (t_of v) -> Tot vprop)) = VUnit (vdep' v p) (* Selector rewrite combinator *) /// The selector of a rewrite combinator applies a function `f` to the current selector of a vprop. 
val vrewrite_sel (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot (selector t (normal (hp_of v))) /// Exposing the definition of the above selector val vrewrite_sel_eq (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) (h: Mem.hmem (normal (hp_of v))) : Lemma ((vrewrite_sel v f <: selector' _ _) h == f ((normal (sel_of v) <: selector' _ _) h)) // [SMTPat (vrewrite_sel v f h)] // FIXME: this pattern causes Z3 "wrong number of argument" errors /// Combining the above elements to create a rewrite vprop [@__steel_reduce__] let vrewrite' (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop' = { hp = normal (hp_of v); t = t; sel = vrewrite_sel v f; } [@__steel_reduce__] let vrewrite (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop = VUnit (vrewrite' v f) (*** Framing tactic ***) (* Specialize visit_tm from the standard F* tactic library to reimplement name_appears_in. AF: As of Jan 14, 2021, calling name_appears_in from FStar.Tactics.Derived leads to a segfault *) exception Appears let on_sort_binder (f : term -> Tac unit) (b:binder) : Tac unit = f b.sort let rec visit_tm (ff : term -> Tac unit) (t : term) : Tac unit = let tv = inspect t in (match tv with | Tv_FVar _ | Tv_UInst _ _ | Tv_Var _ | Tv_BVar _ -> () | Tv_Type _ -> () | Tv_Const c -> () | Tv_Uvar i u -> () | Tv_Unsupp -> () | Tv_Unknown -> () | Tv_Arrow b c -> on_sort_binder ff b; visit_comp ff c | Tv_Abs b t -> let b = on_sort_binder (visit_tm ff) b in visit_tm ff t | Tv_App l (r, q) -> visit_tm ff l; visit_tm ff r | Tv_Refine b r -> on_sort_binder ff b; visit_tm ff r | Tv_Let r attrs b def t -> on_sort_binder ff b; visit_tm ff def; visit_tm ff t | Tv_Match sc _ brs -> visit_tm ff sc; iter (visit_br ff) brs | Tv_AscribedT e t topt _ -> visit_tm ff e; visit_tm ff t | Tv_AscribedC e c topt _ -> visit_tm ff e ); ff t and visit_br (ff : term -> Tac unit) (b:branch) : Tac unit = let (p, t) = b in visit_tm ff t and visit_comp (ff : term -> Tac unit) (c : comp) : Tac 
unit = let cv = inspect_comp c in match cv with | C_Total ret -> visit_tm ff ret | C_GTotal ret -> visit_tm ff ret | C_Lemma pre post pats -> visit_tm ff pre; visit_tm ff post; visit_tm ff pats | C_Eff us eff res args decrs -> visit_tm ff res; iter (fun (a, q) -> visit_tm ff a) args; iter (visit_tm ff) decrs /// Decides whether a top-level name [nm] syntactically /// appears in the term [t]. let name_appears_in (nm:name) (t:term) : Tac bool = let ff (t : term) : Tac unit = match inspect t with | Tv_FVar fv -> if inspect_fv fv = nm then raise Appears | t -> () in try ignore (visit_tm ff t); false with | Appears -> true | e -> raise e /// Checks whether term [t] appears in term [i] let term_appears_in (t:term) (i:term) : Tac bool = name_appears_in (explode_qn (term_to_string t)) i /// We define a small language to handle arbitrary separation logic predicates. /// Separation logic predicates are encoded as atoms for which equality is decidable, /// here represented as integers let atom : eqtype = int let rec atoms_to_string (l:list atom) = match l with | [] -> "" | hd::tl -> string_of_int hd ^ " " ^ atoms_to_string tl /// Reflecting the structure of our separation logic on atmos type exp : Type = | Unit : exp | Mult : exp -> exp -> exp | Atom : atom -> exp /// A map from atoms to the terms they represent. 
/// The second component of the term corresponds to a default element, /// ensuring we never raise an exception when trying to access an element in the map let amap (a:Type) = list (atom * a) * a /// An empty atom map: The list map is empty let const (#a:Type) (xa:a) : amap a = ([], xa) /// Accessing an element in the atom map // We reimplement List.Tot.Base.assoc because we need our tactic to normalize it, // but we don't want to normalize user-provided instances let rec my_assoc (#key: eqtype) (#value: Type) (k: key) (dict: list (key & value)) : Pure (option value) (requires True) (ensures (fun res -> res == List.Tot.assoc k dict)) = match dict with | [] -> None | (k', v') :: q -> if k = k' then Some v' else my_assoc k q let select (#a:Type) (x:atom) (am:amap a) : Tot a = match my_assoc #atom #a x (fst am) with | Some a -> a | _ -> snd am /// Updating the atom map. Since select finds the first element corresponding to /// the atom in the list and we do not have any remove function, /// we can simply append the new element at the head without removing any possible /// previous element let update (#a:Type) (x:atom) (xa:a) (am:amap a) : amap a = (x, xa)::fst am, snd am /// Check whether the current term is an unresolved vprop unification variable. /// This can happen if either it is a uvar, or it is an unresolved dependent /// vprop uvar which is applied to some argument let is_uvar (t:term) : Tac bool = match inspect t with | Tv_Uvar _ _ -> true | Tv_App _ _ -> let hd, args = collect_app t in Tv_Uvar? 
(inspect hd) | _ -> false /// For a given term t, collect all terms in the list l with the same head symbol let rec get_candidates (t:term) (l:list term) : Tac (list term) = let name, _ = collect_app t in match l with | [] -> [] | hd::tl -> let n, _ = collect_app hd in if term_eq_old n name then ( hd::(get_candidates t tl) ) else get_candidates t tl /// Try to remove a term that is exactly matching, not just that can be unified let rec trivial_cancel (t:atom) (l:list atom) = match l with | [] -> false, l | hd::tl -> if hd = t then // These elements match, we remove them true, tl else (let b, res = trivial_cancel t tl in b, hd::res) /// Call trivial_cancel on all elements of l1. /// The first two lists returned are the remainders of l1 and l2. /// The last two lists are the removed parts of l1 and l2, with /// the additional invariant that they are equal let rec trivial_cancels (l1 l2:list atom) (am:amap term) : Tac (list atom * list atom * list atom * list atom) = match l1 with | [] -> [], l2, [], [] | hd::tl -> let b, l2' = trivial_cancel hd l2 in let l1', l2', l1_del, l2_del = trivial_cancels tl l2' am in (if b then l1' else hd::l1'), l2', (if b then hd::l1_del else l1_del), (if b then hd::l2_del else l2_del) exception Failed exception Success /// Helper to print the terms corresponding to the current list of atoms let rec print_atoms (l:list atom) (am:amap term) : Tac string = match l with | [] -> "" | [hd] -> term_to_string (select hd am) | hd::tl -> term_to_string (select hd am) ^ " * " ^ print_atoms tl am /// For a list of candidates l, count the number that can unify with t. /// Does not try to unify with a uvar, this will be done at the very end. 
/// Tries to unify with slprops with a different head symbol, it might
/// be an abbreviation
let rec try_candidates (t:atom) (l:list atom) (am:amap term) : Tac (atom * int) =
  match l with
  | [] -> t, 0
  | hd::tl ->
    if is_uvar (select hd am) then (try_candidates t tl am)
    else
      // Encapsulate unify in a try/with to ensure unification is not actually performed
      let res =
        try if unify (select t am) (select hd am) then raise Success else raise Failed
        with | Success -> true | _ -> false
      in
      let t', n' = try_candidates t tl am in
      if res && hd <> t' then hd, 1 + n' else t', n'

/// Remove the given term from the list. Only to be called when
/// try_candidates succeeded
let rec remove_from_list (t:atom) (l:list atom) : Tac (list atom) =
  match l with
  | [] -> fail "atom in remove_from_list not found: should not happen"; []
  | hd::tl -> if t = hd then tl else hd::remove_from_list t tl

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration
let rec equivalent_lists_once (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l1 with
  | [] -> [], l2, l1_del, l2_del
  | hd::tl ->
    let t, n = try_candidates hd l2 am in
    if n = 1 then (
      // Unique candidate: commit to it and record the matched pair
      let l2 = remove_from_list t l2 in
      equivalent_lists_once tl l2 (hd::l1_del) (t::l2_del) am
    ) else (
      // Either too many candidates for this scrutinee, or no candidate but the uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once tl l2 l1_del l2_del am in
      hd::rem1, rem2, l1'_del, l2'_del
    )

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates by iterating on l2.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration.
/// This is very close to equivalent_lists_once above, but helps making progress
/// when l1 contains syntactically equal candidates
let rec equivalent_lists_once_l2 (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l2 with
  | [] -> l1, [], l1_del, l2_del
  | hd::tl ->
    if is_uvar (select hd am) then
      // We do not try to match the vprop uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
      rem1, hd::rem2, l1'_del, l2'_del
    else (
      let t, n = try_candidates hd l1 am in
      if n = 1 then (
        // Unique candidate found in l1: commit to it
        let l1 = remove_from_list t l1 in
        equivalent_lists_once_l2 l1 tl (t::l1_del) (hd::l2_del) am
      ) else (
        // Either too many candidates for this scrutinee, or no candidate but the uvar
        let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
        rem1, hd::rem2, l1'_del, l2'_del
      )
    )

/// Returns the term of the first atom in [l], or the unit literal `() when [l] is empty
let get_head (l:list atom) (am:amap term) : term = match l with
  | [] -> `()
  | hd::_ -> select hd am

/// Checks whether the list of atoms [l] only contains one unresolved uvar
let is_only_uvar (l:list atom) (am:amap term) : Tac bool =
  if List.Tot.Base.length l = 1 then is_uvar (select (List.Tot.Base.hd l) am)
  else false

/// Assumes that u is a uvar, checks that all variables in l can be unified with it.
/// Later in the tactic, the uvar will be unified to a star of l
let rec try_unifying_remaining (l:list atom) (u:term) (am:amap term) : Tac unit =
  match l with
  | [] -> ()
  | hd::tl ->
    // Same Success/Failed trick as try_candidates: observe unifiability via exceptions
    try if unify u (select hd am) then raise Success else raise Failed with
    | Success -> try_unifying_remaining tl u am
    | _ -> fail ("could not find candidate for scrutinee " ^ term_to_string (select hd am))

/// Is SMT rewriting enabled for this binder
let is_smt_binder (b:binder) : Tac bool =
  let l = b.attrs in
  // Enabled iff the binder carries the smt_fallback attribute
  not (List.Tot.isEmpty (filter (fun t -> is_fvar t (`%smt_fallback)) l))

/// Creates a new term, where all arguments where SMT rewriting is enabled have been replaced
/// by fresh, unconstrained unification variables.
/// Returns the new argument list together with the original terms that were replaced
let rec new_args_for_smt_attrs (env:env) (l:list argv) (ty:typ) : Tac (list argv * list term) =
  // Helper: allocate a fresh ghost uvar of the given type in the current environment
  let fresh_ghost_uvar ty =
    let e = cur_env () in
    ghost_uvar_env e ty
  in
  match l, inspect_unascribe ty with
  | (arg, aqualv)::tl, Tv_Arrow binder comp ->
    let needs_smt = is_smt_binder binder in
    let new_hd =
      if needs_smt then (
        let arg_ty = tc env arg in
        let uvar = fresh_ghost_uvar arg_ty in
        unshelve uvar;
        flip ();
        (uvar, aqualv)
      ) else (arg, aqualv)
    in
    begin
      // Peel the codomain off the arrow to recurse on the remaining arguments
      let ty2 =
        match inspect_comp comp with
        | C_Total ty2 -> ty2
        | C_Eff _ eff_name ty2 _ _ ->
          if eff_name = ["Prims"; "Tot"] then ty2
          else fail "computation type not supported in definition of slprops"
        | _ -> fail "computation type not supported in definition of slprops"
      in
      let tl_argv, tl_terms = new_args_for_smt_attrs env tl ty2 in
      new_hd::tl_argv, (if needs_smt then arg::tl_terms else tl_terms)
    end
  | [], Tv_FVar fv -> [], []
  | _ -> fail "should not happen. Is an slprop partially applied?"
/// Rewrites all terms in the context to enable SMT rewriting through the use of fresh, unconstrained unification variables
let rewrite_term_for_smt (env:env) (am:amap term * list term) (a:atom) : Tac (amap term * list term) =
  let am, prev_uvar_terms = am in
  let term = select a am in
  let hd, args = collect_app term in
  let t = tc env hd in
  let new_args, uvar_terms = new_args_for_smt_attrs env args t in
  let new_term = mk_app hd new_args in
  update a new_term am, List.Tot.append uvar_terms prev_uvar_terms

/// User-facing error message when the framing tactic fails
let fail_atoms (#a:Type) (l1 l2:list atom) (am:amap term) : Tac a =
  fail ("could not find a solution for unifying\n" ^ print_atoms l1 am ^ "\nand\n" ^ print_atoms l2 am)

/// Variant of equivalent_lists' below to be called once terms have been rewritten to allow SMT rewriting.
/// If unification succeeds and we have unicity of the solution, this tactic will succeed,
/// and ultimately create an SMT guard that the two terms are actually equal
let rec equivalent_lists_fallback (n:nat) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false)
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then
        // Should always be smaller or equal to n
        // If it is equal, no progress was made.
        fail_atoms rem1 rem2 am
      else equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' am

/// Iterates over all terms in [l2] to prepare them for unification with SMT rewriting
let replace_smt_uvars (l1 l2:list atom) (am:amap term) : Tac (amap term * list term) =
  let env = cur_env () in
  fold_left (rewrite_term_for_smt env) (am, []) l2

/// Recursively calls equivalent_lists_once.
/// Stops when we're done with unification, or when we didn't make any progress
/// If we didn't make any progress, we have too many candidates for some terms.
/// Accumulates rewritings of l1 and l2 in l1_del and l2_del, with the invariant
/// that the two lists are unifiable at any point
/// The boolean indicates if there is a leftover empty frame
let rec equivalent_lists' (n:nat) (use_smt:bool) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool * list term) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false, [])
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true, []))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false, []
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then (
        // Try to make progress by matching non-uvars of l2 with candidates in l1
        let rem1, rem2, l1_del', l2_del' = equivalent_lists_once_l2 rem1 rem2 l1_del' l2_del' am in
        let n' = List.Tot.length rem1 in
        if n' >= n then (
          // Should always be smaller or equal to n
          // If it is equal, no progress was made.
          if use_smt then
            // SMT fallback is allowed
            let new_am, uvar_terms = replace_smt_uvars rem1 rem2 am in
            let l1_f, l2_f, b = equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' new_am in
            l1_f, l2_f, b, uvar_terms
          else
            fail_atoms rem1 rem2 am
        ) else equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am
      ) else equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am

/// Checks if the term for atom t unifies with all uvars in l
let rec unifies_with_all_uvars (t:term) (l:list atom) (am:amap term) : Tac bool =
  match l with
  | [] -> true
  | hd::tl ->
    if unifies_with_all_uvars t tl am then (
      // Unified with tail, try this term
      let hd_t = select hd am in
      if is_uvar hd_t then (
        // The head term is a uvar, try unifying
        try if unify t hd_t then raise Success else raise Failed
        with | Success -> true | _ -> false
      ) else true // The uvar is not a head term, we do not need to try it
    ) else false

/// Puts all terms in l1 that cannot unify with the uvars in l2 at the top:
/// They need to be solved first
let rec most_restricted_at_top (l1 l2:list atom) (am:amap term) : Tac (list atom) =
  match l1 with
  | [] -> []
  | hd::tl ->
    if unifies_with_all_uvars (select hd am) l2 am
    then (most_restricted_at_top tl l2 am) `List.Tot.append` [hd]
    else hd::(most_restricted_at_top tl l2 am)

/// Core AC-unification tactic.
/// First remove all trivially equal terms, then try to decide equivalence.
/// Assumes that l1 does not contain any vprop uvar.
/// If it succeeds, returns permutations of l1, l2, and a boolean indicating
/// if l2 has a trailing empty frame to be unified
let equivalent_lists (use_smt:bool) (l1 l2:list atom) (am:amap term)
  : Tac (list atom * list atom * bool * list term) =
  let l1, l2, l1_del, l2_del = trivial_cancels l1 l2 am in
  let l1 = most_restricted_at_top l1 l2 am in
  let n = List.Tot.length l1 in
  let l1_del, l2_del, emp_frame, uvar_terms = equivalent_lists' n use_smt l1 l2 l1_del l2_del am in
  l1_del, l2_del, emp_frame, uvar_terms

(* Helpers to relate the actual terms to their representation as a list of atoms *)

open FStar.Reflection.V2.Derived.Lemmas

/// Debug helper: space-separated rendering of a list of terms
let rec list_to_string (l:list term) : Tac string =
  match l with
  | [] -> "end"
  | hd::tl -> term_to_string hd ^ " " ^ list_to_string tl

/// Denotation of an expression, parameterized by a unit and a multiplication
let rec mdenote_gen (#a:Type u#aa) (unit:a) (mult:a -> a -> a) (am:amap a) (e:exp) : a =
  match e with
  | Unit -> unit
  | Atom x -> select x am
  | Mult e1 e2 -> mult (mdenote_gen unit mult am e1) (mdenote_gen unit mult am e2)

/// Denotation of a list of atoms (left-associated product), parameterized likewise
let rec xsdenote_gen (#a:Type) (unit:a) (mult:a -> a -> a) (am:amap a) (xs:list atom) : a =
  match xs with
  | [] -> unit
  | [x] -> select x am
  | x::xs' -> mult (select x am) (xsdenote_gen unit mult am xs')

unfold
let mdenote (#a:Type u#aa) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp) : a =
  let open FStar.Algebra.CommMonoid.Equiv in
  mdenote_gen (CM?.unit m) (CM?.mult m) am e

unfold
let xsdenote (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom) : a =
  let open FStar.Algebra.CommMonoid.Equiv in
  xsdenote_gen (CM?.unit m) (CM?.mult m) am xs

// We reimplement List.Tot.Base.append because we need our tactic to normalize it,
// but we don't want to normalize user-provided instances
let rec my_append (#t: Type) (l1 l2: list t)
  : Pure (list t) (requires True) (ensures (fun res -> res == l1 `List.Tot.append` l2)) (decreases l1)
= match l1 with
  | [] -> l2
  | a :: q -> a :: my_append q l2

/// Flatten an expression into its list of atoms
let rec flatten (e:exp) : list atom =
  match e with
  | Unit -> []
  | Atom x -> [x]
  | Mult e1 e2 -> flatten e1 `my_append` flatten e2

/// Denoting the append of two atom lists is equivalent to multiplying their denotations
let rec flatten_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs1 xs2:list atom)
  : Lemma (xsdenote eq m am (xs1 `my_append` xs2) `CE.EQ?.eq eq`
           CE.CM?.mult m (xsdenote eq m am xs1) (xsdenote eq m am xs2)) =
  let open FStar.Algebra.CommMonoid.Equiv in
  match xs1 with
  | [] ->
    CM?.identity m (xsdenote eq m am xs2);
    EQ?.symmetry eq (CM?.mult m (CM?.unit m) (xsdenote eq m am xs2)) (xsdenote eq m am xs2)
  | [x] -> (
    if (Nil? xs2)
    then (right_identity eq m (select x am);
          EQ?.symmetry eq (CM?.mult m (select x am) (CM?.unit m)) (select x am))
    else EQ?.reflexivity eq (CM?.mult m (xsdenote eq m am [x]) (xsdenote eq m am xs2)))
  | x::xs1' ->
    flatten_correct_aux eq m am xs1' xs2;
    EQ?.reflexivity eq (select x am);
    CM?.congruence m (select x am) (xsdenote eq m am (xs1' `my_append` xs2))
                     (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2));
    CM?.associativity m (select x am) (xsdenote eq m am xs1') (xsdenote eq m am xs2);
    EQ?.symmetry eq (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2))
                    (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)));
    EQ?.transitivity eq
      (CM?.mult m (select x am) (xsdenote eq m am (xs1' `my_append` xs2)))
      (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)))
      (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2))

/// An expression and its flattened atom list have equivalent denotations
let rec flatten_correct (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp)
  : Lemma (mdenote eq m am e `CE.EQ?.eq eq` xsdenote eq m am (flatten e)) =
  let open FStar.Algebra.CommMonoid.Equiv in
  match e with
  | Unit -> EQ?.reflexivity eq (CM?.unit m)
  | Atom x -> EQ?.reflexivity eq (select x am)
  | Mult e1 e2 ->
    flatten_correct_aux eq m am (flatten e1) (flatten e2);
    EQ?.symmetry eq (xsdenote eq m am (flatten e1 `my_append` flatten e2))
                    (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)));
    flatten_correct eq m am e1;
    flatten_correct eq m am e2;
    CM?.congruence m (mdenote eq m am e1) (mdenote eq m am e2)
                     (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2));
    EQ?.transitivity eq (CM?.mult m (mdenote eq m am e1) (mdenote eq m am e2))
                        (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)))
                        (xsdenote eq m am (flatten e1 `my_append` flatten e2))

/// Reflect an equivalence between flattened denotations back to the expressions
let monoid_reflect (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e1 e2:exp)
    (_ : squash (xsdenote eq m am (flatten e1) `CE.EQ?.eq eq` xsdenote eq m am (flatten e2)))
  : squash (mdenote eq m am e1 `CE.EQ?.eq eq` mdenote eq m am e2) =
  flatten_correct eq m am e1;
  flatten_correct eq m am e2;
  CE.EQ?.symmetry eq (mdenote eq m am e2) (xsdenote eq m am (flatten e2));
  CE.EQ?.transitivity eq (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)) (mdenote eq m am e2);
  CE.EQ?.transitivity eq (mdenote eq m am e1) (xsdenote eq m am (flatten e1)) (mdenote eq m am e2)

// Here we sort the variable numbers
// We reimplement List.Tot.Base.sortWith because we need our tactic to normalize it,
// but we don't want to normalize user-provided instances
let rec my_partition (#a: Type) (f: (a -> Tot bool)) (l: list a)
  : Pure (list a & list a) (requires True) (ensures (fun res -> res == List.Tot.partition f l))
= match l with
  | [] -> [], []
  | hd::tl ->
    let l1, l2 = my_partition f tl in
    if f hd then hd::l1, l2 else l1, hd::l2

/// partition is extensional in its predicate
let rec partition_ext (#a: Type) (f1 f2: (a -> Tot bool)) (l: list a)
  : Lemma (requires (forall x . f1 x == f2 x))
          (ensures (List.Tot.partition f1 l == List.Tot.partition f2 l))
= match l with
  | [] -> ()
  | hd::tl -> partition_ext f1 f2 tl

let my_bool_of_compare (#a: Type) (f: a -> a -> Tot int) (x: a) (y: a) : Tot bool = f x y < 0

let rec my_sortWith (#a: Type) (f: (a -> a -> Tot int)) (l:list a)
  : Pure (list a) (requires True) (ensures (fun res -> res == List.Tot.sortWith f l))
    (decreases (List.Tot.length l))
= match l with
  | [] -> []
  | pivot::tl ->
    let hi, lo = my_partition (my_bool_of_compare f pivot) tl in
    partition_ext (my_bool_of_compare f pivot) (List.Tot.bool_of_compare f pivot) tl;
    List.Tot.partition_length (List.Tot.bool_of_compare f pivot) tl;
    my_append (my_sortWith f lo) (pivot::my_sortWith f hi)

/// sortWith is extensional in its comparator
let rec sortWith_ext (#a: Type) (f1 f2: (a -> a -> Tot int)) (l: list a)
  : Lemma (requires (forall x y . f1 x y == f2 x y))
          (ensures (List.Tot.sortWith f1 l == List.Tot.sortWith f2 l))
    (decreases (List.Tot.length l))
= match l with
  | [] -> ()
  | pivot::tl ->
    partition_ext (List.Tot.bool_of_compare f1 pivot) (List.Tot.bool_of_compare f2 pivot) tl;
    List.Tot.partition_length (List.Tot.bool_of_compare f1 pivot) tl;
    let hi, lo = List.Tot.partition (List.Tot.bool_of_compare f1 pivot) tl in
    sortWith_ext f1 f2 lo;
    sortWith_ext f1 f2 hi

let permute = list atom -> list atom

let my_compare_of_bool (#a:eqtype) (rel: a -> a -> Tot bool) (x: a) (y: a) : Tot int =
  if x `rel` y then -1 else if x = y then 0 else 1

/// Canonical permutation: sort atoms (integers) in increasing order
let sort : permute = my_sortWith #int (my_compare_of_bool (<))

#push-options "--fuel 1 --ifuel 1"
/// Unfolding lemma: hd::tl denotes to (select hd) * (denotation of tl), even when tl is empty
let lemma_xsdenote_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (hd:atom) (tl:list atom)
  : Lemma (xsdenote eq m am (hd::tl) `CE.EQ?.eq eq`
           (CE.CM?.mult m (select hd am) (xsdenote eq m am tl))) =
  let open FStar.Algebra.CommMonoid.Equiv in
  match tl with
  | [] ->
    assert (xsdenote eq m am (hd::tl) == select hd am);
    CM?.identity m (select hd am);
    EQ?.symmetry eq (CM?.unit m `CM?.mult m` select hd am) (select hd am);
    CM?.commutativity m (CM?.unit m) (select hd am);
    EQ?.transitivity eq
      (xsdenote eq m am (hd::tl))
      (CM?.unit m `CM?.mult m` select hd am)
      (CM?.mult m (select hd am) (xsdenote eq m am tl))
  | _ -> EQ?.reflexivity eq (xsdenote eq m am (hd::tl))

/// Partitioning preserves the denotation (up to the monoid equivalence)
let rec partition_equiv (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (pivot:atom) (q:list atom)
  : Lemma (let open FStar.List.Tot.Base in
           let hi, lo = partition (bool_of_compare (compare_of_bool (<)) pivot) q in
           CE.EQ?.eq eq
             (xsdenote eq m am hi `CE.CM?.mult m` xsdenote eq m am lo)
             (xsdenote eq m am q))
= let open FStar.Algebra.CommMonoid.Equiv in
  let open FStar.List.Tot.Base in
  let f = bool_of_compare (compare_of_bool (<)) pivot in
  let hi, lo = partition f q in
  match q with
  | [] -> CM?.identity m (xsdenote eq m am hi)
  | hd::tl ->
    let l1, l2 = partition f tl in
    partition_equiv eq m am pivot tl;
    assert (EQ?.eq eq
      (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)
      (xsdenote eq m am tl));
    EQ?.reflexivity eq (xsdenote eq m am l1);
    EQ?.reflexivity eq (xsdenote eq m am l2);
    EQ?.reflexivity eq (xsdenote eq m am hi);
    EQ?.reflexivity eq (xsdenote eq m am lo);
    if f hd then begin
      assert (hi == hd::l1 /\ lo == l2);
      lemma_xsdenote_aux eq m am hd l1;
      CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo)
                       (select hd am `CM?.mult m` xsdenote eq m am l1) (xsdenote eq m am l2);
      CM?.associativity m (select hd am) (xsdenote eq m am l1) (xsdenote eq m am l2);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        ((select hd am `CM?.mult m` xsdenote eq m am l1) `CM?.mult m` xsdenote eq m am l2)
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2));
      EQ?.reflexivity eq (select hd am);
      CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)
                       (select hd am) (xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2))
        (select hd am `CM?.mult m` xsdenote eq m am tl);
      lemma_xsdenote_aux eq m am hd tl;
      EQ?.symmetry eq (xsdenote eq m am (hd::tl)) (select hd am `CM?.mult m` xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` xsdenote eq m am tl)
        (xsdenote eq m am (hd::tl))
    end
    else begin
      assert (hi == l1 /\ lo == hd::l2);
      lemma_xsdenote_aux eq m am hd l2;
      CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo)
                       (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2);
      CM?.commutativity m (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (xsdenote eq m am l1 `CM?.mult m` (select hd am `CM?.mult m` xsdenote eq m am l2))
        ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1);
      CM?.associativity m (select hd am) (xsdenote eq m am l2) (xsdenote eq m am l1);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1)
        (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1));
      CM?.commutativity m (xsdenote eq m am l2) (xsdenote eq m am l1);
      EQ?.reflexivity eq (select hd am);
      CM?.congruence m (select hd am) (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1)
                       (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1))
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2));
      CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)
                       (select hd am) (xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2))
        (select hd am `CM?.mult m` xsdenote eq m am tl);
      lemma_xsdenote_aux eq m am hd tl;
      EQ?.symmetry eq (xsdenote eq m am (hd::tl)) (select hd am `CM?.mult m` xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` xsdenote eq m am tl)
        (xsdenote eq m am (hd::tl))
    end

/// Sorting an atom list preserves its denotation (up to the monoid equivalence)
let rec sort_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom)
  : Lemma (requires True)
          (ensures xsdenote eq m am xs `CE.EQ?.eq eq` xsdenote eq m am (sort xs))
          (decreases (FStar.List.Tot.Base.length xs))
= let open FStar.Algebra.CommMonoid.Equiv in
  match xs with
  | [] -> EQ?.reflexivity eq (xsdenote eq m am [])
  | pivot::q ->
    let sort0 : permute = List.Tot.sortWith #int (List.Tot.compare_of_bool (<)) in
    // Local SMT-patterned bridge between our sort and the stdlib sortWith
    let sort_eq (l: list atom) : Lemma (sort l == sort0 l) [SMTPat (sort l)] =
      sortWith_ext (my_compare_of_bool (<)) (List.Tot.compare_of_bool (<)) l
    in
    let open FStar.List.Tot.Base in
    let f:int -> int -> int = compare_of_bool (<) in
    let hi, lo = partition (bool_of_compare f pivot) q in
    flatten_correct_aux eq m am (sort lo) (pivot::sort hi);
    assert (xsdenote eq m am (sort xs) `EQ?.eq eq`
            CM?.mult m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi)));
    lemma_xsdenote_aux eq m am pivot (sort hi);
    EQ?.reflexivity eq (xsdenote eq m am (sort lo));
    CM?.congruence m
      (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi))
      (xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` xsdenote eq m am (sort hi));
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (xsdenote eq m am (sort lo) `CM?.mult m` xsdenote eq m am (pivot::sort hi))
      (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))));
    CM?.commutativity m (xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` xsdenote eq m am (sort hi));
    CM?.associativity m (select pivot am) (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo));
    EQ?.transitivity eq
      (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)))
      ((select pivot am `CM?.mult m` xsdenote eq m am (sort hi)) `CM?.mult m` xsdenote eq m am (sort lo))
      (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)));
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)))
      (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))));
    partition_length (bool_of_compare f pivot) q;
    sort_correct_aux eq m am hi;
    sort_correct_aux eq m am lo;
    EQ?.symmetry eq (xsdenote eq m am lo) (xsdenote eq m am (sort lo));
    EQ?.symmetry eq (xsdenote eq m am hi) (xsdenote eq m am (sort hi));
    CM?.congruence m
      (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo))
      (xsdenote eq m am hi) (xsdenote eq m am lo);
    assert (EQ?.eq eq
      (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))
      (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo));
    EQ?.reflexivity eq (select pivot am);
    CM?.congruence m
      (select pivot am) (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))
      (select pivot am) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo);
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)))
      (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)));
    partition_equiv eq m am pivot q;
    CM?.congruence m
      (select pivot am) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
      (select pivot am) (xsdenote eq m am q);
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo))
      (select pivot am `CM?.mult m` (xsdenote eq m am q));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am q)));
    lemma_xsdenote_aux eq m am pivot q;
    EQ?.symmetry eq (xsdenote eq m am (pivot::q)) (select pivot am `CM?.mult m` (xsdenote eq m am q));
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am q))
      (xsdenote eq m am xs);
    EQ?.symmetry eq (xsdenote eq m am (sort xs)) (xsdenote eq m am xs)
#pop-options

#push-options "--fuel 0 --ifuel 0"
(* Lemmas to be called after a permutation compatible with AC-unification was found *)

let smt_reflexivity (#a:Type) (eq:CE.equiv a) (x y:a)
  : Lemma (requires x == y) (ensures CE.EQ?.eq eq x y)
  = CE.EQ?.reflexivity eq x

let identity_left_smt (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a)
  : Lemma (requires x == y) (ensures CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) y))
  = CE.CM?.identity m x;
    CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x

let identity_left (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x:a)
  : Lemma (CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) x))
  = CE.CM?.identity m x;
    CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x

let identity_right_diff (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a)
  : Lemma (requires CE.EQ?.eq eq x y) (ensures CE.EQ?.eq eq (CE.CM?.mult m x (CE.CM?.unit m)) y)
  = CE.right_identity eq m x;
    CE.EQ?.transitivity eq (CE.CM?.mult m x (CE.CM?.unit m)) x y

/// Dismiss possible vprops goals that might have been created by lemma application.
/// These vprops will be instantiated at a later stage; else, Meta-F* will raise an error
let rec dismiss_slprops () : Tac unit =
  match term_as_formula' (cur_goal ()) with
  | App t _ -> if is_fvar t (`%squash) then () else (dismiss(); dismiss_slprops ())
  | _ -> dismiss(); dismiss_slprops ()

/// Recursively removing trailing empty assertions
let rec n_identity_left (n:int) (eq m:term) : Tac unit =
  if n = 0 then (
    apply_lemma (`(CE.EQ?.reflexivity (`#eq)));
    // Cleaning up, in case a uvar has been generated here. It'll be solved later
    set_goals [])
  else (
    apply_lemma (`identity_right_diff (`#eq) (`#m));
    // Drop the slprops generated, they will be solved later
    dismiss_slprops ();
    n_identity_left (n-1) eq m
  )

/// Helper lemma: If two vprops (as represented by lists of atoms) are equivalent, then their canonical forms
/// (corresponding to applying the sort function on atoms) are equivalent
let equivalent_sorted (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (l1 l2 l1' l2':list atom)
  : Lemma (requires
             sort l1 == sort l1' /\
             sort l2 == sort l2' /\
             xsdenote eq m am l1 `CE.EQ?.eq eq` xsdenote eq m am l2)
          (ensures xsdenote eq m am l1' `CE.EQ?.eq eq` xsdenote eq m am l2')
= let open FStar.Algebra.CommMonoid.Equiv in
  sort_correct_aux eq m am l1';
  sort_correct_aux eq m am l1;
  EQ?.symmetry eq (xsdenote eq m am l1) (xsdenote eq m am (sort l1));
  EQ?.transitivity eq
    (xsdenote eq m am l1')
    (xsdenote eq m am (sort l1'))
    (xsdenote eq m am l1);
  EQ?.transitivity eq
    (xsdenote eq m am l1')
    (xsdenote eq m am l1)
    (xsdenote eq m am l2);
  sort_correct_aux eq m am l2;
  EQ?.transitivity eq
    (xsdenote eq m am l1')
    (xsdenote eq m am l2)
    (xsdenote eq m am (sort l2));
  sort_correct_aux eq m am l2';
  EQ?.symmetry eq (xsdenote eq m am l2') (xsdenote eq m am (sort l2'));
  EQ?.transitivity eq
    (xsdenote eq m am l1')
    (xsdenote eq m am (sort l2))
    (xsdenote eq m am l2')
#pop-options

/// Finds the position of first occurrence of x in xs.
/// This is now specialized to terms and their funny term_eq_old.
let rec where_aux (n:nat) (x:term) (xs:list term) : Tac (option nat) (decreases xs) =
  match xs with
  | [] -> None
  | x'::xs' -> if term_eq_old x x' then Some n else where_aux (n+1) x xs'

let where = where_aux 0

/// Reify term [t] as an atom: reuse its existing atom when already present in [ts],
/// otherwise allocate a fresh atom and extend both the term list and the map
let fatom (t:term) (ts:list term) (am:amap term) : Tac (exp * list term * amap term) =
  match where t ts with
  | Some v -> (Atom v, ts, am)
  | None ->
    let vfresh = List.Tot.Base.length ts in
    let t = norm_term [iota; zeta] t in
    (Atom vfresh, ts `List.Tot.append` [t], update vfresh t am)

/// Transforming a term into the corresponding list of atoms
/// If the atomic terms were already present in the map [am], then
/// they correspond to the same atoms
/// This expects that mult, unit, and t have already been normalized
let rec reification_aux (ts:list term) (am:amap term) (mult unit t : term)
  : Tac (exp * list term * amap term) =
  let hd, tl = collect_app_ref t in
  match inspect_unascribe hd, List.Tot.Base.list_unref tl with
  | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] ->
    if term_eq_old (pack (Tv_FVar fv)) mult
    then (let (e1, ts, am) = reification_aux ts am mult unit t1 in
          let (e2, ts, am) = reification_aux ts am mult unit t2 in
          (Mult e1 e2, ts, am))
    else fatom t ts am
  | _, _ ->
    if term_eq_old t unit
    then (Unit, ts, am)
    else fatom t ts am

/// Performs the required normalization before calling the function above
let reification (eq: term) (m: term) (ts:list term) (am:amap term) (t:term)
  : Tac (exp * list term * amap term) =
  let mult = norm_term [iota; zeta; delta] (`CE.CM?.mult (`#m)) in
  let unit = norm_term [iota; zeta; delta] (`CE.CM?.unit (`#m)) in
  let t = norm_term [iota; zeta] t in
  reification_aux ts am mult unit t

/// Meta-F* internal: Transforms the atom map into a term
let rec convert_map (m : list (atom * term)) : term =
  match m with
  | [] -> `[]
  | (a, t)::ps ->
    let a = pack (Tv_Const (C_Int a)) in
    (* let t = norm_term [delta] t in *)
    `((`#a, (`#t)) :: (`#(convert_map ps)))

/// `am` is an amap (basically a list) of terms, each representing a value
/// of type `a` (whichever we are canonicalizing). This function converts
/// `am` into a single `term` of type `amap a`, suitable to call `mdenote` with
let convert_am (am : amap term) : term =
  let (map, def) = am in
  (* let def = norm_term [delta] def in *)
  `( (`#(convert_map map), `#def) )

/// Transforms a term representation (an exp) into a term through quotation
let rec quote_exp (e:exp) : term =
  match e with
  | Unit -> (`Unit)
  | Mult e1 e2 -> (`Mult (`#(quote_exp e1)) (`#(quote_exp e2)))
  | Atom n ->
    let nt = pack (Tv_Const (C_Int n)) in
    (`Atom (`#nt))

/// Quotes a list of atoms as a term
let rec quote_atoms (l:list atom) =
  match l with
  | [] -> `[]
  | hd::tl ->
    let nt = pack (Tv_Const (C_Int hd)) in
    (`Cons (`#nt) (`#(quote_atoms tl)))

/// Some internal normalization steps to make reflection of vprops into atoms and atom permutation go smoothly.
/// We reimplemented sorting/list functions to normalize our uses without normalizing those introduced by the user.
let normal_tac_steps = [primops; iota; zeta; delta_only [
  `%mdenote; `%select; `%my_assoc; `%my_append; `%flatten; `%sort;
  `%my_sortWith; `%my_partition; `%my_bool_of_compare; `%my_compare_of_bool;
  `%fst; `%__proj__Mktuple2__item___1; `%snd; `%__proj__Mktuple2__item___2;
  `%CE.__proj__CM__item__unit; `%CE.__proj__CM__item__mult; `%rm]]

/// The normalization function, using the above normalization steps
let normal_tac (#a:Type) (x:a) : a = FStar.Pervasives.norm normal_tac_steps x

/// Helper lemma to establish relation between normalized and initial values
let normal_elim (x:Type0) : Lemma (requires x) (ensures normal_tac x) = ()

// Raised to return the AC-matching result out of a deeply nested tactic computation
exception Result of list atom * list atom * bool * list term

/// F* equalities are typed, but the generated type sometimes is a unification variable.
/// This helper ensures that such unification variables are not left unresolved, which would lead to an error
let close_equality_typ' (t:term) : Tac unit =
  let f = term_as_formula' t in
  match f with
  | Comp (Eq (Some u)) l _ -> if is_uvar u then (unshelve u; exact_with_ref (tc (cur_env()) l))
  | _ -> ()

/// Recursively closing equality types in a given term (usually a unification constraint)
let close_equality_typ (t:term) : Tac unit =
  visit_tm close_equality_typ' t

/// Core unification tactic.
/// Transforms terms into their atom representations,
/// Tries to find a solution to AC-unification, and if so,
/// soundly permutes the atom representations before calling the unifier
/// to check the validity of the provided solution.
/// In the case where SMT rewriting was needed, equalities abduction is performed by instantiating the
/// abduction prop unification variable with the corresponding guard
/// 09/24:
///
/// The tactic internally builds a map from atoms to terms
/// and uses the map for reflecting the goal to atoms representation
/// During reflection, the tactics engine typechecks the amap, and hence all
/// the terms again
/// This typechecking of terms is unnecessary, since the terms are coming
/// from the goal, and hence are already well-typed
/// Worse, re-typechecking them may generate a lot of SMT queries
/// And even worse, the SMT queries are discharged in the static context,
/// requiring various workarounds (e.g. squash variables for if conditions etc.)
///
/// To fix this, we now "name" the terms and use the amap with names
///
/// Read through the canon_l_r function for how we do this

/// The following three lemmas are helpers to manipulate the goal in canon_l_r

// Instantiates one universal quantifier of the goal (used once per named atom)
[@@ no_subtyping]
let inst_bv (#a:Type) (#p:a -> Type0) (#q:Type0) (x:a) (_:squash (p x ==> q))
  : Lemma ((forall (x:a). p x) ==> q) = ()

// Discharges an implication whose hypothesis has already been proved
let modus_ponens (#p #q:Type0) (_:squash p)
  : Lemma ((p ==> q) ==> q) = ()

// Introduces an auxiliary goal p together with the implication p ==> q
let cut (p q:Type0) : Lemma (requires p /\ (p ==> q)) (ensures q) = ()

// Strips a vacuous `/\ (p ==> True)` conjunct from the goal
let and_true (p: Type0) : Lemma (requires (p /\ (p ==> True))) (ensures p) = ()

// Closes trivial implication goals of the shape p ==> True
let solve_implies_true (p: Type0) : Lemma (p ==> True) = ()

// This exception is raised for failures that should not be considered
// hard but should allow postponing the goal instead
exception Postpone of string

(* NOTE! Redefining boolean disjunction to *not* be short-circuiting,
   since we cannot use an effectful result as argument of Prims.op_BarBar *)
private let bor = op_BarBar

// Syntactic recognizers for the head symbols handled by the tactics below
private let is_and (t:term) : bool =
  is_any_fvar t [`%(/\); `%prop_and]

private let is_squash (t:term) : bool =
  is_any_fvar t [`%squash; `%auto_squash]

private let is_star (t:term) : bool =
  is_any_fvar t [`%star; `%VStar]

private let is_star_or_unit (t:term) : bool =
  is_any_fvar t [`%star; `%VStar; `%VUnit]

/// Recursively walks the SMT guard [pr] (a conjunction) and unifies every
/// unification-variable conjunct with True; raises Postpone (not a hard
/// failure) when uvars remain that other goals might still solve.
let rec unify_pr_with_true (pr: term) : Tac unit =
  let hd, tl = collect_app pr in
  if is_and hd
  then
    match tl with
    | [pr_l, _; pr_r, _] ->
      unify_pr_with_true pr_l;
      unify_pr_with_true pr_r
    | _ -> fail "unify_pr_with_true: ill-formed /\\"
  else
    match inspect_unascribe hd with
    | Tv_Uvar _ _ ->
      if unify pr (`true_p)
      then ()
      else begin
        fail "unify_pr_with_true: could not unify SMT prop with True"
      end
    | _ ->
      if List.Tot.length (free_uvars pr) = 0
      then ()
      else
        // postpone the goal instead of failing hard, to allow for other goals to solve those uvars
        raise (Postpone "unify_pr_with_true: some uvars are still there")

// Proves `squash goal` from a proof of the left conjunct's consequence
let elim_and_l_squash (#a #b: Type0) (#goal: Type0)
  (f: (a -> Tot (squash goal))) (h: (a /\ b)) : Tot (squash goal) =
  let f' (x: squash a) : Tot (squash goal) =
    FStar.Squash.bind_squash x f
  in
  let elim_impl (x: squash (a /\ b)) : Tot (squash a) = () in
  f' (elim_impl (FStar.Squash.return_squash h))

// Symmetric variant of elim_and_l_squash for the right conjunct
let elim_and_r_squash (#a #b: Type0) (#goal: Type0)
  (f: (b -> Tot (squash goal))) (h: (a /\ b)) : Tot (squash goal) =
  let f' (x: squash b) : Tot (squash goal) =
    FStar.Squash.bind_squash x f
  in
  let elim_impl (x: squash (a /\ b)) : Tot (squash b) = () in
  f' (elim_impl (FStar.Squash.return_squash h))

// Eta-expanded return_squash, shaped to be applied by set_abduction_variable_term
let _return_squash (#a: Type) () (x: a) : Tot (squash a) =
  FStar.Squash.return_squash x

/// Builds the term instantiating the abduction prop uvar: navigates the
/// conjunction towards the single uvar conjunct, wrapping the path in
/// elim_and_{l,r}_squash applications; postpones if uvars remain on both sides.
let rec set_abduction_variable_term (pr: term) : Tac term =
  let hd, tl = collect_app pr in
  if is_and hd
  then
    match tl with
    | (pr_l, Q_Explicit) :: (pr_r, Q_Explicit) :: [] ->
      if List.Tot.length (free_uvars pr_r) = 0
      then
        let arg = set_abduction_variable_term pr_l in
        mk_app (`elim_and_l_squash) [arg, Q_Explicit]
      else if List.Tot.length (free_uvars pr_l) = 0
      then
        let arg = set_abduction_variable_term pr_r in
        mk_app (`elim_and_r_squash) [arg, Q_Explicit]
      else
        // postpone the goal instead of failing hard, to allow for other goals to solve those uvars
        raise (Postpone "set_abduction_variable_term: there are still uvars on both sides of l_and")
    | _ -> fail "set_abduction_variable: ill-formed /\\"
  else
    match inspect hd with
    | Tv_Uvar _ _ ->
      mk_app (`_return_squash) [`(), Q_Explicit]
    | _ -> fail "set_abduction_variable: cannot unify"

/// Solves an arrow goal whose domain is the abduction prop, by exact-ing the
/// term computed by set_abduction_variable_term
let set_abduction_variable () : Tac unit =
  let g = cur_goal () in
  match inspect_unascribe g with
  | Tv_Arrow b _ ->
    let pr = b.sort in
    exact (set_abduction_variable_term pr)
  | _ -> fail "Not an arrow goal"

let canon_l_r (use_smt:bool)
  (carrier_t:term)  //e.g. vprop
  (eq:term) (m:term)
  (pr pr_bind:term)
  (lhs rel rhs:term) : Tac unit =
  let m_unit = norm_term [iota; zeta; delta] (`(CE.CM?.unit (`#m))) in
  let m_mult = norm_term [iota; zeta; delta] (`(CE.CM?.mult (`#m))) in

  let am = const m_unit in (* empty map *)
  let (r1_raw, ts, am) = reification eq m [] am lhs in
  let (r2_raw, _, am) = reification eq m ts am rhs in

  // Encapsulating this in a try/with to avoid spawning uvars for smt_fallback
  let l1_raw, l2_raw, emp_frame, uvar_terms =
    try
      let res = equivalent_lists use_smt (flatten r1_raw) (flatten r2_raw) am in
      raise (Result res)
    with
    | TacticFailure m -> fail m
    | Result res -> res
    | _ -> fail "uncaught exception in equivalent_lists"
  in

  //So now we have:
  // am : amap mapping atoms to terms in lhs and rhs
  // r1_raw : an expression in the atoms language for lhs
  // r2_raw : an expression in the atoms language for rhs
  // l1_raw : sorted list of atoms in lhs
  // l2_raw : sorted list of atoms in rhs
  //
  //In particular, r1_raw and r2_raw capture lhs and rhs structurally
  //  (i.e. same associativity, emp, etc.)
  //
  //Whereas l1_raw and l2_raw are "canonical" representations of lhs and rhs
  //  (via xsdenote)

  //Build an amap where atoms are mapped to names
  //The type of these names is carrier_t passed by the caller

  let am_bv : list (atom & namedv & typ) = mapi (fun i (a, _) ->
    let x = fresh_namedv_named ("x" ^ (string_of_int i)) in
    (a, x, carrier_t)) (fst am) in
  let am_bv_term : amap term = map (fun (a, bv, _sort) -> a, pack (Tv_Var bv)) am_bv, snd am in

  let mdenote_tm (e:exp) : term = mdenote_gen
    m_unit
    (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)])
    am_bv_term
    e in
  let xsdenote_tm (l:list atom) : term = xsdenote_gen
    m_unit
    (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)])
    am_bv_term
    l in

  //Get the named representations of lhs, rhs, and their respective sorted versions
  let lhs_named = mdenote_tm r1_raw in
  let rhs_named = mdenote_tm r2_raw in
  let sorted_lhs_named = xsdenote_tm l1_raw in
  let sorted_rhs_named = xsdenote_tm l2_raw in

  //We now build an auxiliary goal of the form:
  //
  // forall xs. (sorted_lhs_named `rel` sorted_rhs_named) ==> (lhs_named `rel` rhs_named)
  //
  // where xs are the fresh names that we introduced earlier

  let mk_rel (l r:term) : term =
    mk_app rel [(l, Q_Explicit); (r, Q_Explicit)] in
  let imp_rhs = mk_rel lhs_named rhs_named in
  let imp_lhs = mk_rel sorted_lhs_named sorted_rhs_named in
  let imp =
    mk_app (pack (Tv_FVar (pack_fv imp_qn))) [(imp_lhs, Q_Explicit); (imp_rhs, Q_Explicit)] in

  //fold over names and quantify over them
  let aux_goal = fold_right (fun (_, nv, sort) t ->
    let nvv = inspect_namedv nv in
    let b = {
      ppname = nvv.ppname;
      uniq = nvv.uniq;
      qual = Q_Explicit;
      attrs = [];
      sort = sort;
    } in
    let _, t = close_term b t in
    let t = pack (Tv_Abs b t) in
    mk_app (pack (Tv_FVar (pack_fv forall_qn))) [t, Q_Explicit]) am_bv imp in

  //Introduce a cut with the auxiliary goal
  apply_lemma (`cut (`#aux_goal));

  //After the cut, the goal looks like: A /\ (A ==> G)
  // where A is the auxiliary goal and G is the original goal (lhs `rel` rhs)
  split ();

  //Solving A:
  focus (fun _ ->
    //The proof follows a similar structure as before naming was introduced
    //
    //Except that this time, the amap is in terms of names,
    //  and hence its typechecking is faster and (hopefully) no SMT involved

    //Open the forall binders in A, and use the fresh names to build an amap
    let am = fold_left (fun am (a, _, _sort) ->
      let b = forall_intro () in
      let v = binding_to_namedv b in
      (a, pack (Tv_Var v))::am) [] am_bv, snd am in

    //Introduce the lhs of implication
    let b = implies_intro () in

    //Now the proof is the plain old canon proof
    let am = convert_am am in
    let r1 = quote_exp r1_raw in
    let r2 = quote_exp r2_raw in
    change_sq (`(normal_tac (mdenote (`#eq) (`#m) (`#am) (`#r1)
                   `CE.EQ?.eq (`#eq)`
                 mdenote (`#eq) (`#m) (`#am) (`#r2))));
    apply_lemma (`normal_elim);
    apply (`monoid_reflect );
    let l1 = quote_atoms l1_raw in
    let l2 = quote_atoms l2_raw in
    apply_lemma (`equivalent_sorted (`#eq) (`#m) (`#am) (`#l1) (`#l2));
    if List.Tot.length (goals ()) = 0 then ()
    else begin
      norm [primops; iota; zeta; delta_only
        [`%xsdenote; `%select;
         `%my_assoc; `%my_append;
         `%flatten; `%sort;
         `%my_sortWith; `%my_partition;
         `%my_bool_of_compare; `%my_compare_of_bool;
         `%fst; `%__proj__Mktuple2__item___1;
         `%snd; `%__proj__Mktuple2__item___2;
         `%CE.__proj__CM__item__unit;
         `%CE.__proj__CM__item__mult;
         `%rm;
         `%CE.__proj__EQ__item__eq;
         `%req;
         `%star;]
      ];

      //The goal is of the form G1 /\ G2 /\ G3, as in the requires of equivalent_sorted
      split ();
      split ();

      //Solve G1 and G2 by trefl
      trefl ();
      trefl ();

      //G3 is the lhs of the implication in the auxiliary goal
      //  that we have in our assumptions via b
      apply (`FStar.Squash.return_squash);
      exact (binding_to_term b)
    end);

  dismiss_slprops();

  //Our goal now is A ==> G (where G is the original goal (lhs `rel` rhs))

  //Open the forall binders
  ignore (repeatn (List.Tot.length am_bv) (fun _ -> apply_lemma (`inst_bv)));

  //And apply modus ponens
  apply_lemma (`modus_ponens);

  //Now our goal is sorted_lhs_named `rel` sorted_rhs_named
  //  where the names are replaced with fresh uvars (from the repeatn call above)

  //So we just trefl
  match uvar_terms with
  | [] -> // Closing unneeded prop uvar
    focus (fun _ ->
      try
        apply_lemma (`and_true);
        split ();
        if emp_frame then apply_lemma (`identity_left (`#eq) (`#m))
        else apply_lemma (`(CE.EQ?.reflexivity (`#eq)));
        unify_pr_with_true pr; // MUST be done AFTER identity_left/reflexivity, which can unify other uvars
        apply_lemma (`solve_implies_true)
      with
      | TacticFailure msg -> fail ("Cannot unify pr with true: " ^ msg)
      | e -> raise e
    )
  | l ->
    if emp_frame then (
      apply_lemma (`identity_left_smt (`#eq) (`#m))
    ) else (
      apply_lemma (`smt_reflexivity (`#eq))
    );
    t_trefl true;
    close_equality_typ (cur_goal());
    revert ();
    set_abduction_variable ()

/// Wrapper around the tactic above
/// The constraint should be of the shape `squash (equiv lhs rhs)`
let canon_monoid (use_smt:bool) (carrier_t:term) (eq m:term) (pr pr_bind:term) : Tac unit =
  norm [iota; zeta];
  let t = cur_goal () in
  // removing top-level squash application
  let sq, rel_xy = collect_app_ref t in
  // unpacking the application of the equivalence relation (lhs `EQ?.eq eq` rhs)
  (match rel_xy with
   | [(rel_xy,_)] -> (
       let open FStar.List.Tot.Base in
       let rel, xy = collect_app_ref rel_xy in
       if (length xy >= 2)
       then (
         match index xy (length xy - 2) , index xy (length xy - 1) with
         | (lhs, Q_Explicit) , (rhs, Q_Explicit) ->
           canon_l_r use_smt carrier_t eq m pr pr_bind lhs rel rhs
         | _ -> fail "Goal should have been an application of a binary relation to 2 explicit arguments"
       )
       else (
         fail "Goal should have been an application of a binary relation to n implicit and 2 explicit arguments"
       )
     )
   | _ -> fail "Goal should be squash applied to a binary relation")

/// Instantiation of the generic AC-unification tactic with the vprop commutative monoid
let canon' (use_smt:bool) (pr:term) (pr_bind:term) : Tac unit =
  canon_monoid use_smt (pack (Tv_FVar (pack_fv [`%vprop]))) (`req) (`rm) pr pr_bind

/// Counts the number of unification variables corresponding to vprops in the term [t]
let rec slterm_nbr_uvars (t:term) : Tac int =
  match inspect_unascribe t with
  | Tv_Uvar _ _ -> 1
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    if is_star_or_unit hd then
      // Only count the number of unresolved slprops, not program implicits
      slterm_nbr_uvars_argv args
    else if is_uvar hd then 1
    else 0
  | Tv_Abs _ t -> slterm_nbr_uvars t
  | _ -> 0

and slterm_nbr_uvars_argv (args: list argv) : Tac int =
  fold_left (fun n (x, _) -> n + slterm_nbr_uvars x) 0 args

// Identity marker: wraps a vprop whose guards must be solved before unfolding
let guard_vprop (v: vprop) : Tot vprop = v

/// True iff every guard_vprop occurrence in [t] has all its slprop uvars solved
let rec all_guards_solved (t: term) : Tac bool =
  match inspect_unascribe t with
  | Tv_Abs _ t -> all_guards_solved t
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    if hd `is_fvar` (`%guard_vprop)
    then slterm_nbr_uvars_argv args = 0
    else if not (all_guards_solved hd)
    then false
    else
      // Fold the arguments into a single thunk so that the effectful checks
      // are sequenced inside Tac (cannot use && on effectful results)
      List.Tot.fold_left
        (fun (tac: (unit -> Tac bool)) (tm, _) ->
          let f () : Tac bool =
            if all_guards_solved tm
            then tac ()
            else false
          in
          f
        )
        (let f () : Tac bool = true in f)
        args
        ()
  | _ -> true

/// If all guards in the current goal are solved, unfolds guard_vprop and
/// returns true; otherwise leaves the goal untouched and returns false
let unfold_guard () : Tac bool =
  if all_guards_solved (cur_goal ())
  then begin
    focus (fun _ -> norm [delta_only [(`%guard_vprop)]]);
    true
  end else
    false

/// True iff [t] (or the head of its application spine) is the uvar numbered [i]
let rec term_is_uvar (t: term) (i: int) : Tac bool = match inspect t with
  | Tv_Uvar i' _ -> i = i'
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    term_is_uvar hd i
  | _ -> false

val solve_can_be_split_for : string -> Tot unit

val solve_can_be_split_lookup : unit // FIXME: src/reflection/FStar.Reflection.Basic.lookup_attr only supports fvar attributes, so we cannot directly look up for (solve_can_be_split_for blabla), we need a nullary attribute to use with lookup_attr

// Keeps only the last goal of [l] as the active goal set
let rec dismiss_all_but_last' (l: list goal) : Tac unit =
  match l with
  | [] | [_] -> set_goals l
  | _ :: q -> dismiss_all_but_last' q

let dismiss_all_but_last () : Tac unit =
  dismiss_all_but_last' (goals ())

// Keeps only the squash-shaped goals, preserving their original order
let rec dismiss_non_squash_goals' (keep:list goal) (goals:list goal) : Tac unit =
  match goals with
  | [] -> set_goals (List.Tot.rev keep)
  | hd :: tl ->
    let f = term_as_formula' (goal_type hd) in
    match f with
    | App hs _ ->
      if is_squash hs
      then dismiss_non_squash_goals' (hd::keep) tl
      else dismiss_non_squash_goals' keep tl
    | _ ->
      dismiss_non_squash_goals' keep tl

let dismiss_non_squash_goals () =
  let g = goals () in
  dismiss_non_squash_goals' [] g

// Membership test on terms, using the old syntactic equality
let rec term_mem (te: term) (l: list term) : Tac bool =
  match l with
  | [] -> false
  | t' :: q ->
    if te `term_eq_old` t' then true else term_mem te q

// Filters [l], keeping the fvars whose sigelt carries the attribute [attr]
let rec lookup_by_term_attr' (attr: term) (e: env) (found: list fv) (l: list fv) : Tac (list fv) =
  match l with
  | [] -> List.Tot.rev found
  | f :: q ->
    let n = inspect_fv f in
    begin match lookup_typ e n with
    | None -> lookup_by_term_attr' attr e found q
    | Some se ->
      let found' =
        if attr `term_mem` sigelt_attrs se
        then f :: found
        else found
      in
      lookup_by_term_attr' attr e found' q
    end

/// Looks up definitions carrying both the nullary [label_attr] (so that
/// lookup_attr finds them) and the term-level [attr]
let lookup_by_term_attr (label_attr: term) (attr: term) : Tac (list fv) =
  let e = cur_env () in
  let candidates = lookup_attr label_attr e in
  lookup_by_term_attr' attr e [] candidates

// Moves the last goal to the front of the goal list
let rec bring_last_goal_on_top' (others: list goal) (goals: list goal) : Tac unit =
  match goals with
  | [] -> set_goals (List.Tot.rev others)
  | last :: [] -> set_goals (last :: List.Tot.rev others)
  | a :: q -> bring_last_goal_on_top' (a :: others) q

let bring_last_goal_on_top () =
  let g = goals () in
  bring_last_goal_on_top' [] g

/// Searches [t] (a star-structured vprop term) for a sub-vprop whose head has a
/// registered solver (looked up via label_attr/attr); returns a tactic that
/// applies the congruence lemmas down to it and runs the registered solver
let rec extract_contexts
  (lemma_left lemma_right label_attr attr: term)
  (t: term)
: Tac (option (unit -> Tac unit))
= let hd, tl = collect_app t in
  if is_star hd
  then match tl with
  | (t_left, Q_Explicit) :: (t_right, Q_Explicit) :: [] ->
    let extract_right () : Tac (option (unit -> Tac unit)) =
      match extract_contexts lemma_left lemma_right label_attr attr t_right with
      | None -> None
      | Some f ->
        Some (fun _ ->
          apply_lemma lemma_right;
          dismiss_all_but_last ();
          f ()
        )
    in
    begin match extract_contexts lemma_left lemma_right label_attr attr t_left with
    | None -> extract_right ()
    | Some f ->
      Some (fun _ ->
        try
          apply_lemma lemma_left;
          dismiss_all_but_last ();
          f ()
        with _ ->
          begin match extract_right () with
          | None -> fail "no context on the right either"
          | Some g -> g ()
          end
      )
    end
  | _ -> None
  else
    let candidates =
      let hd_fv =
        match inspect_unascribe hd with
        | Tv_FVar fv -> Some fv
        | Tv_UInst fv _ -> Some fv
        | _ -> None
      in
      match hd_fv with
      | None -> []
      | Some hd_fv ->
        let hd_s' = implode_qn (inspect_fv hd_fv) in
        let hd_s = pack (Tv_Const (C_String hd_s')) in
        lookup_by_term_attr label_attr (mk_app attr [hd_s, Q_Explicit])
    in
    if Nil? candidates
    then None
    else
      Some (fun _ ->
        first (List.Tot.map (fun candidate _ -> apply_lemma (pack (Tv_FVar candidate)) <: Tac unit) candidates);
        dismiss_non_squash_goals ()
      )

let extract_cbs_contexts = extract_contexts
  (`can_be_split_congr_l)
  (`can_be_split_congr_r)
  (`solve_can_be_split_lookup)
  (`solve_can_be_split_for)

/// Tries to open existentials hidden on the rhs of a can_be_split goal by
/// applying a user-registered intro lemma found via extract_cbs_contexts
let open_existentials () : Tac unit
= let e = cur_env () in
  if Nil? (lookup_attr (`solve_can_be_split_lookup) e)
  then fail "Tactic disabled: no available lemmas in context";
  norm [delta_attr [`%__reduce__]];
  let t0 = cur_goal () in
  match collect_app t0 with
  | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] ->
    let hd, tl = collect_app t1 in
    if hd `is_fvar` (`%can_be_split)
    then
      match tl with
      | _ (* lhs *) :: (rhs, Q_Explicit) :: [] ->
        begin match extract_cbs_contexts rhs with
        | None -> fail "open_existentials: no context found"
        | Some f ->
          apply_lemma (`can_be_split_trans_rev);
          dismiss_all_but_last ();
          split ();
          focus f;
          bring_last_goal_on_top () // so that any preconditions for the selected lemma are scheduled for later
        end
      | _ -> fail "open_existentials: ill-formed can_be_split"
    else
      fail "open_existentials: not a can_be_split goal"
  | _ -> fail "open_existentials: not a squash goal"

let try_open_existentials () : Tac bool =
  focus (fun _ ->
    try
      open_existentials ();
      true
    with _ -> false
  )

(* Solving the can_be_split* constraints, if they are ready to be scheduled
   A constraint is deemed ready to be scheduled if it contains only one vprop unification variable
   If so, constraints are stripped to their underlying definition based on vprop equivalence,
   introducing universally quantified variables when needed.
   Internal details of the encoding are removed through normalization,
   before calling the AC-unification tactic defined above *)

/// Solves a `can_be_split` constraint
let rec solve_can_be_split (args:list argv) : Tac bool =
  match args with
  | [(t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      try
        focus (fun _ -> apply_lemma (`equiv_can_be_split);
                     dismiss_slprops();
                     // If we have exactly the same term on both side,
                     // equiv_sl_implies would solve the goal immediately
                     or_else (fun _ -> apply_lemma (`equiv_refl))
                       (fun _ ->
                        // NOTE(review): the other solvers below guard equiv_sym with
                        // `lnbr <> 0 && rnbr = 0`; here only `rnbr = 0` is checked — confirm intended
                        if rnbr = 0 then apply_lemma (`equiv_sym);
                        norm [delta_only [
                               `%__proj__CM__item__unit;
                               `%__proj__CM__item__mult;
                               `%rm;
                               `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                               `%fst; `%snd];
                             delta_attr [`%__reduce__];
                             primops; iota; zeta];
                        canon' false (`true_p) (`true_p)));
        true
      with
      | _ ->
        let opened_some = try_open_existentials () in
        if opened_some
        then solve_can_be_split args // we only need args for their number of uvars, which has not changed
        else false
    ) else false
  | _ -> false // Ill-formed can_be_split, should not happen

/// Solves a can_be_split_dep constraint
let solve_can_be_split_dep (args:list argv) : Tac bool =
  match args with
  | [(p, _); (t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      focus (fun _ ->
        let p_bind = implies_intro () in
        apply_lemma (`equiv_can_be_split);
        dismiss_slprops ();
        or_else
          (fun _ ->
            let b = unify p (`true_p) in
            if not b then fail "could not unify SMT prop with True";
            apply_lemma (`equiv_refl))
          (fun _ ->
            if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
            or_else (fun _ -> flip()) (fun _ -> ());
            norm [delta_only [
                   `%__proj__CM__item__unit;
                   `%__proj__CM__item__mult;
                   `%rm;
                   `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                   `%fst; `%snd];
                 delta_attr [`%__reduce__];
                 primops; iota; zeta];
            canon' true p (binding_to_term p_bind)));
      true
    ) else false
  | _ -> fail "ill-formed can_be_split_dep"

/// Helper rewriting lemma
val emp_unit_variant (p:vprop) : Lemma
   (ensures can_be_split p (p `star` emp))

/// Solves a can_be_split_forall constraint
let solve_can_be_split_forall (args:list argv) : Tac bool =
  match args with
  | [_; (t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      focus (fun _ ->
        ignore (forall_intro());
        apply_lemma (`equiv_can_be_split);
        dismiss_slprops();
        or_else (fun _ -> apply_lemma (`equiv_refl))
          (fun _ ->
            if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
            or_else (fun _ -> flip()) (fun _ -> ());
            norm [delta_only [
                   `%__proj__CM__item__unit;
                   `%__proj__CM__item__mult;
                   `%rm;
                   `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                   `%fst; `%snd];
                 delta_attr [`%__reduce__];
                 primops; iota; zeta];
            canon' false (`true_p) (`true_p)));
      true
    ) else false
  | _ -> fail "Ill-formed can_be_split_forall, should not happen"

val solve_can_be_split_forall_dep_for : string -> Tot unit

val solve_can_be_split_forall_dep_lookup : unit // FIXME: same as solve_can_be_split_for above

let extract_cbs_forall_dep_contexts = extract_contexts
  (`can_be_split_forall_dep_congr_l)
  (`can_be_split_forall_dep_congr_r)
  (`solve_can_be_split_forall_dep_lookup)
  (`solve_can_be_split_forall_dep_for)

/// Variant of open_existentials for can_be_split_forall_dep goals: the rhs is
/// an abstraction, so the context search runs on its body
let open_existentials_forall_dep () : Tac unit
= let e = cur_env () in
  if Nil? (lookup_attr (`solve_can_be_split_forall_dep_lookup) e)
  then fail "Tactic disabled: no available lemmas in context";
  norm [
    delta_only [
      `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit;
      `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult;
      `%rm;
    ];
    iota;
    delta_attr [`%__reduce__];
  ];
  let t0 = cur_goal () in
  match collect_app t0 with
  | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] ->
    let hd, tl = collect_app t1 in
    if hd `is_fvar` (`%can_be_split_forall_dep)
    then
      match tl with
      | _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: []
      | (_, Q_Implicit) (* #a *) :: _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: [] ->
        begin match inspect_unascribe rhs with
        | Tv_Abs _ body ->
          begin match extract_cbs_forall_dep_contexts body with
          | None -> fail "open_existentials_forall_dep: no candidate"
          | Some f ->
            apply_lemma (`can_be_split_forall_dep_trans_rev);
            dismiss_all_but_last ();
            split ();
            focus f;
            bring_last_goal_on_top ();
            if Cons? (goals ()) then norm []
          end
        | _ -> fail "open_existentials_forall_dep : not an abstraction"
        end
      | _ -> fail "open_existentials_forall_dep : wrong number of arguments to can_be_split_forall_dep"
    else
      fail "open_existentials_forall_dep : not a can_be_split_forall_dep goal"
  | _ -> fail "open_existentials_forall_dep : not a squash/auto_squash goal"

let try_open_existentials_forall_dep () : Tac bool =
  focus (fun _ ->
    try
      open_existentials_forall_dep ();
      true
    with _ -> false
  )

/// Solves a can_be_split_forall_dep constraint
let rec solve_can_be_split_forall_dep (args:list argv) : Tac bool =
  match args with
  | [_; (pr, _); (t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      try
        focus (fun _ ->
          norm [];
          let x = forall_intro () in
          let pr = mk_app pr [(binding_to_term x, Q_Explicit)] in
          let p_bind = implies_intro () in
          apply_lemma (`equiv_can_be_split);
          or_else (fun _ -> flip()) (fun _ -> ());
          let pr = norm_term [] pr in
          or_else
            (fun _ ->
              let b = unify pr (`true_p) in
              if not b then fail "could not unify SMT prop with True";
              apply_lemma (`equiv_refl))
            (fun _ ->
              if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
              or_else (fun _ -> flip()) (fun _ -> ());
              norm [delta_only [
                     `%__proj__CM__item__unit;
                     `%__proj__CM__item__mult;
                     `%rm;
                     `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                     `%fst; `%snd];
                   delta_attr [`%__reduce__];
                   primops; iota; zeta];
              canon' true pr (binding_to_term p_bind)));
        true
      with
      | Postpone msg -> false
      | TacticFailure msg ->
        let opened = try_open_existentials_forall_dep () in
        if opened
        then solve_can_be_split_forall_dep args // we only need args for their number of uvars, which has not changed
        else fail msg
      | _ -> fail "Unexpected exception in framing tactic"
    ) else false
  | _ -> fail "Ill-formed can_be_split_forall_dep, should not happen"

/// Solves an equiv_forall constraint
let solve_equiv_forall (args:list argv) : Tac bool =
  match args with
  | [_; (t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      focus (fun _ ->
        apply_lemma (`equiv_forall_elim);
        match goals () with
        | [] -> ()
        | _ ->
          dismiss_slprops ();
          ignore (forall_intro());
          or_else
            (fun _ -> apply_lemma (`equiv_refl))
            (fun _ ->
              if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
              or_else (fun _ -> flip()) (fun _ -> ());
              norm [delta_only [
                     `%__proj__CM__item__unit;
                     `%__proj__CM__item__mult;
                     `%rm;
                     `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                     `%fst; `%snd];
                   delta_attr [`%__reduce__];
                   primops; iota; zeta];
              canon' false (`true_p) (`true_p)));
      true
    ) else false
  | _ -> fail "Ill-formed equiv_forall, should not happen"

/// Solves an equiv constraint
let solve_equiv (args:list argv) : Tac bool =
  match args with
  | [(t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      focus (fun _ ->
        or_else
          (fun _ -> apply_lemma (`equiv_refl))
          (fun _ ->
            if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
            or_else (fun _ -> flip ()) (fun _ -> ());
            norm [delta_only [
                   `%__proj__CM__item__unit;
                   `%__proj__CM__item__mult;
                   `%rm;
                   `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                   `%fst; `%snd];
                 delta_attr [`%__reduce__];
                 primops; iota; zeta];
            canon' false (`true_p) (`true_p)));
      true
    ) else false
  | _ -> fail "Ill-formed equiv, should not happen"

/// Solves a can_be_split_post constraint
let solve_can_be_split_post (args:list argv) : Tac bool =
  match args with
  | [_; _; (t1, _); (t2, _)] ->
    let lnbr = slterm_nbr_uvars t1 in
    let rnbr = slterm_nbr_uvars t2 in
    if
      if lnbr + rnbr <= 1
      then unfold_guard ()
      else false
    then (
      let open FStar.Algebra.CommMonoid.Equiv in
      focus (fun _ ->
        norm[];
        // NOTE(review): g is bound but unused here — presumably kept for debugging; confirm
        let g = _cur_goal () in
        ignore (forall_intro());
        apply_lemma (`equiv_forall_elim);
        match goals () with
        | [] -> ()
        | _ ->
          dismiss_slprops ();
          ignore (forall_intro());
          or_else
            (fun _ -> apply_lemma (`equiv_refl))
            (fun _ ->
              if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
              or_else (fun _ -> flip()) (fun _ -> ());
              norm [delta_only [
                     `%__proj__CM__item__unit;
                     `%__proj__CM__item__mult;
                     `%rm;
                     `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                     `%fst; `%snd];
                   delta_attr [`%__reduce__];
                   primops; iota; zeta];
              canon' false (`true_p) (`true_p)));
      true
    ) else false
  | _ -> fail "ill-formed can_be_split_post"

/// Checks whether any of the two terms was introduced during a Steel monadic return
let is_return_eq (l r:term) : Tac bool =
  let nl, al = collect_app l in
  let nr, ar = collect_app r in
  is_fvar nl (`%return_pre) || is_fvar nr (`%return_pre)

/// Solves indirection equalities introduced by the layered effects framework.
/// If these equalities were introduced during a monadic return, they need to be solved /// at a later stage to avoid overly restricting contexts of unification variables let rec solve_indirection_eqs (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | hd::_ -> let f = term_as_formula' (goal_type hd) in match f with | Comp (Eq _) l r -> if is_return_eq l r then later() else trefl(); solve_indirection_eqs (fuel - 1) | _ -> later(); solve_indirection_eqs (fuel - 1) /// Solve all equalities in the list of goals by calling the F* unifier let rec solve_all_eqs (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | hd::_ -> let f = term_as_formula' (goal_type hd) in match f with | Comp (Eq _) l r -> trefl(); solve_all_eqs (fuel - 1) | _ -> later(); solve_all_eqs (fuel - 1) /// It is important to not normalize the return_pre eqs goals before unifying /// See test7 in FramingTestSuite for a detailed explanation let rec solve_return_eqs (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | hd::_ -> let f = term_as_formula' (goal_type hd) in match f with | Comp (Eq _) l r -> trefl(); solve_return_eqs (fuel - 1) | _ -> later(); solve_return_eqs (fuel - 1) /// Strip annotations in a goal, to get to the underlying slprop equivalence let goal_to_equiv (loc:string) : Tac unit = let t = cur_goal () in let f = term_as_formula' t in match f with | App hd0 t -> if not (is_fvar hd0 (`%squash)) then fail (loc ^ " unexpected non-squash goal in goal_to_equiv"); let hd, args = collect_app t in if hd `is_fvar` (`%can_be_split) then ( apply_lemma (`equiv_can_be_split) ) else if hd `is_fvar` (`%can_be_split_forall) then ( ignore (forall_intro ()); apply_lemma (`equiv_can_be_split) ) else if hd `is_fvar` (`%equiv_forall) then ( apply_lemma (`equiv_forall_elim); ignore (forall_intro ()) ) else if hd `is_fvar` (`%can_be_split_post) then ( apply_lemma (`can_be_split_post_elim); dismiss_slprops(); ignore 
(forall_intro ()); ignore (forall_intro ()) ) else if hd `is_fvar` (`%can_be_split_dep) then ( fail ("can_be_split_dep not supported in " ^ loc) ) else if hd `is_fvar` (`%can_be_split_forall_dep) then ( fail ("can_be_split_forall_dep not supported in " ^ loc) ) else // This should never happen fail (loc ^ " goal in unexpected position") | _ -> fail (loc ^ " unexpected goal") let rec term_dict_assoc (#a: Type) (key: term) (l: list (term & a)) : Tac (list a) = match l with | [] -> [] | (k, v) :: q -> let q' = term_dict_assoc key q in if k `term_eq_old` key then (v :: q') else q' /// Returns true if the goal has been solved, false if it should be delayed let solve_or_delay (dict: list (term & (unit -> Tac bool))) : Tac bool = // Beta-reduce the goal first if possible norm []; let f = term_as_formula' (cur_goal ()) in match f with | App hd0 t -> if is_fvar hd0 (`%squash) then let hd, args = collect_app t in if hd `is_fvar` (`%can_be_split) then solve_can_be_split args else if hd `is_fvar` (`%can_be_split_forall) then solve_can_be_split_forall args else if hd `is_fvar` (`%equiv_forall) then solve_equiv_forall args else if hd `is_fvar` (`%can_be_split_post) then solve_can_be_split_post args else if hd `is_fvar` (`%equiv) then solve_equiv args else if hd `is_fvar` (`%can_be_split_dep) then solve_can_be_split_dep args else if hd `is_fvar` (`%can_be_split_forall_dep) then solve_can_be_split_forall_dep args else let candidates = term_dict_assoc hd dict in let run_tac (tac: unit -> Tac bool) () : Tac bool = focus tac in begin try first (List.Tot.map run_tac candidates) with _ -> (* this is a logical goal, solve it only if it has no uvars *) if List.Tot.length (free_uvars t) = 0 then (smt (); true) else false end else // TODO: handle non-squash goals here false | Comp (Eq _) l r -> let lnbr = List.Tot.length (free_uvars l) in let rnbr = List.Tot.length (free_uvars r) in // Only solve equality if one of the terms is completely determined if lnbr = 0 || rnbr = 0 then (trefl (); 
true) else false | _ -> false /// Returns true if it successfully solved a goal /// If it returns false, it means it didn't find any solvable goal, /// which should mean only delayed goals are left let rec vprop_term_uvars (t:term) : Tac (list int) = match inspect_unascribe t with | Tv_Uvar i' _ -> [i'] | Tv_App _ _ -> let hd, args = collect_app t in if is_star_or_unit hd then // Only count the number of unresolved slprops, not program implicits argv_uvars args else vprop_term_uvars hd | Tv_Abs _ t -> vprop_term_uvars t | _ -> [] and argv_uvars (args: list argv) : Tac (list int) = let t : unit -> Tac (list int) = fold_left (fun (n: unit -> Tac (list int)) (x, _) -> let t () : Tac (list int) = let l1 = n () in let l2 = vprop_term_uvars x in l1 `List.Tot.append` l2 in t ) (fun _ -> []) args in t () let rec remove_dups_from_sorted (#t: eqtype) (l: list t) : Tot (list t) = match l with | [] | [_] -> l | a1 :: a2 :: q -> if a1 = a2 then remove_dups_from_sorted (a2 :: q) else a1 :: remove_dups_from_sorted (a2 :: q) let simplify_list (l: list int) : Tot (list int) = remove_dups_from_sorted (List.Tot.sortWith (List.Tot.compare_of_bool (<)) l) let goal_term_uvars (t: term) : Tac (list int) = let hd, tl = collect_app t in if hd `is_fvar` (`%squash) then match tl with | [tl0, Q_Explicit] -> let _, tl1 = collect_app tl0 in simplify_list (argv_uvars tl1) | _ -> dump "ill-formed squash"; [] else [] let rec merge_sorted (l1 l2: list int) : Tot (list int) (decreases (List.Tot.length l1 + List.Tot.length l2)) = match l1 with | [] -> l2 | a1 :: q1 -> begin match l2 with | [] -> l1 | a2 :: q2 -> if a1 < a2 then a1 :: merge_sorted q1 l2 else if a2 < a1 then a2 :: merge_sorted l1 q2 else a1 :: merge_sorted q1 q2 end let rec sorted_lists_intersect (l1 l2: list int) : Tot bool (decreases (List.Tot.length l1 + List.Tot.length l2)) = match l1 with | [] -> false | a1 :: q1 -> begin match l2 with | [] -> false | a2 :: q2 -> if a1 = a2 then true else if a1 < a2 then sorted_lists_intersect q1 
l2 else sorted_lists_intersect l1 q2 end /// TODO: cache the list of variables for each goal, to avoid computing them several times /// Compute the list of all vprop uvars that appear in the same goal as unsolved guard_vprop let rec compute_guarded_uvars1 (accu: list int) (g: list goal) : Tac (list int) = match g with | [] -> accu | a :: q -> let t = goal_type a in let accu' = if all_guards_solved t then accu else merge_sorted accu (goal_term_uvars t) in compute_guarded_uvars1 accu' q /// Enrich the list of vprop uvars with those that appear in the same goal let rec compute_guarded_uvars2 (accu: list int) (g: list goal) : Tac (list int) = match g with | [] -> accu | a :: q -> let t = goal_type a in let l = goal_term_uvars t in let accu' = if sorted_lists_intersect accu l then merge_sorted accu l else accu in compute_guarded_uvars2 accu' q let rec compute_guarded_uvars3 (accu: list int) (g: list goal) : Tac (list int) = let accu' = compute_guarded_uvars2 accu g in if accu = accu' then accu else compute_guarded_uvars3 accu' g let compute_guarded_uvars () : Tac (list int) = let g = goals () in let accu = compute_guarded_uvars1 [] g in compute_guarded_uvars3 accu g let rec pick_next (guarded_uvars: list int) (dict: _) (fuel: nat) : Tac bool = if fuel = 0 then false else match goals () with | [] -> true | a::_ -> let t = goal_type a in let l = goal_term_uvars t in let next () : Tac bool = later (); pick_next guarded_uvars dict (fuel - 1) in if sorted_lists_intersect guarded_uvars l then next () else if solve_or_delay dict then true else next () /// Main loop to schedule solving of goals. /// The goals () function fetches all current goals in the context let rec resolve_tac (dict: _) : Tac unit = match goals () with | [] -> () | g -> norm []; let guarded_uvars = compute_guarded_uvars () in // TODO: If it picks a goal it cannot solve yet, try all the other ones? 
if pick_next guarded_uvars dict (List.Tot.length g) then resolve_tac dict else fail "Could not make progress, no solvable goal found" let rec pick_next_logical (dict: _) (fuel: nat) : Tac bool = if fuel = 0 then false else match goals () with | [] -> true | _::_ -> if solve_or_delay dict then true else (later (); pick_next_logical dict (fuel - 1)) /// Special case for logical requires/ensures goals, which correspond only to equalities let rec resolve_tac_logical (dict: _) : Tac unit = match goals () with | [] -> () | g -> let fuel = List.Tot.length g in if pick_next_logical dict fuel then resolve_tac_logical dict else // This is only for requires/ensures constraints, which are equalities // There should always be a scheduling of constraints, but it can happen // that some uvar for the type of an equality is not resolved. // If we reach this point, we try to simply call the unifier instead of failing directly solve_all_eqs fuel /// Determining whether the type represented by term [t] corresponds to one of the logical (requires/ensures) goals let typ_contains_req_ens (t:term) : Tac bool = let name, _ = collect_app t in is_any_fvar name [`%req_t; `%ens_t; `%pure_wp; `%pure_pre; `%pure_post] /// Splits goals between separation logic goals (slgoals) and requires/ensures goals (loggoals)
false
false
Steel.Effect.Common.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val filter_goals (l: list goal) : Tac (list goal * list goal)
[ "recursion" ]
Steel.Effect.Common.filter_goals
{ "file_name": "lib/steel/Steel.Effect.Common.fsti", "git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
l: Prims.list FStar.Tactics.Types.goal -> FStar.Tactics.Effect.Tac (Prims.list FStar.Tactics.Types.goal * Prims.list FStar.Tactics.Types.goal)
{ "end_col": 30, "end_line": 3020, "start_col": 2, "start_line": 3002 }
FStar.Tactics.Effect.Tac
val norm_return_pre (fuel: nat) : Tac unit
[ { "abbrev": false, "full_module": "FStar.Reflection.V2.Derived.Lemmas", "short_module": null }, { "abbrev": true, "full_module": "FStar.Algebra.CommMonoid.Equiv", "short_module": "CE" }, { "abbrev": false, "full_module": "FStar.Tactics.CanonCommMonoidSimple.Equiv", "short_module": null }, { "abbrev": false, "full_module": "FStar.Tactics.V2", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "FStar.Ghost", "short_module": null }, { "abbrev": true, "full_module": "FStar.FunctionalExtensionality", "short_module": "FExt" }, { "abbrev": true, "full_module": "Steel.Memory", "short_module": "Mem" }, { "abbrev": false, "full_module": "Steel.Memory", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let rec norm_return_pre (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | _::_ -> norm [delta_only [`%return_pre]]; later(); norm_return_pre (fuel - 1)
val norm_return_pre (fuel: nat) : Tac unit let rec norm_return_pre (fuel: nat) : Tac unit =
true
null
false
if fuel = 0 then () else match goals () with | [] -> () | _ :: _ -> norm [delta_only [`%return_pre]]; later (); norm_return_pre (fuel - 1)
{ "checked_file": "Steel.Effect.Common.fsti.checked", "dependencies": [ "Steel.Memory.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Tactics.CanonCommMonoidSimple.Equiv.fst.checked", "FStar.String.fsti.checked", "FStar.Squash.fsti.checked", "FStar.Set.fsti.checked", "FStar.Reflection.V2.Derived.Lemmas.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.Base.fst.checked", "FStar.List.Tot.fst.checked", "FStar.Ghost.fsti.checked", "FStar.FunctionalExtensionality.fsti.checked", "FStar.Classical.fsti.checked", "FStar.Algebra.CommMonoid.Equiv.fst.checked" ], "interface_file": false, "source_file": "Steel.Effect.Common.fsti" }
[]
[ "Prims.nat", "Prims.op_Equality", "Prims.int", "Prims.unit", "Prims.bool", "FStar.Tactics.Types.goal", "Prims.list", "Steel.Effect.Common.norm_return_pre", "Prims.op_Subtraction", "FStar.Tactics.V2.Derived.later", "FStar.Tactics.V2.Builtins.norm", "Prims.Cons", "FStar.Pervasives.norm_step", "FStar.Pervasives.delta_only", "Prims.string", "Prims.Nil", "FStar.Tactics.V2.Derived.goals" ]
[]
(* Copyright 2020 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Steel.Effect.Common open Steel.Memory module Mem = Steel.Memory module FExt = FStar.FunctionalExtensionality open FStar.Ghost /// This module provides various predicates and functions which are common to the /// different Steel effects. /// It also contains the tactic responsible for frame inference through a variant of AC-unification #set-options "--ide_id_info_off" (* Normalization helpers *) irreducible let framing_implicit : unit = () irreducible let __steel_reduce__ : unit = () /// An internal attribute for finer-grained normalization in framing equalities irreducible let __inner_steel_reduce__ : unit = () irreducible let __reduce__ : unit = () irreducible let smt_fallback : unit = () irreducible let ite_attr : unit = () // Needed to avoid some logical vs prop issues during unification with no subtyping [@@__steel_reduce__] unfold let true_p : prop = True module T = FStar.Tactics.V2 let join_preserves_interp (hp:slprop) (m0:hmem hp) (m1:mem{disjoint m0 m1}) : Lemma (interp hp (join m0 m1)) [SMTPat (interp hp (join m0 m1))] = let open Steel.Memory in intro_emp m1; intro_star hp emp m0 m1; affine_star hp emp (join m0 m1) (* Definition of a selector for a given slprop *) /// A selector of type `a` for a separation logic predicate hp is a function /// from a memory where the predicate hp holds, which returns a value of type `a`. 
/// The effect GTot indicates that selectors are ghost functions, used for specification /// and proof purposes only let selector' (a:Type0) (hp:slprop) = hmem hp -> GTot a /// Self-framing property for selectors let sel_depends_only_on (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp) (m1:mem{disjoint m0 m1}). (interp_depends_only_on hp; ( sel m0 == sel (join m0 m1))) /// Additional property that selectors must satisfy, related to internals of /// the Steel memory model encoding let sel_depends_only_on_core (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp). sel m0 == sel (core_mem m0) /// Full definition of a selector, as a function which satisfies the two predicates above let selector (a:Type) (hp:slprop) : Type = sel:selector' a hp{sel_depends_only_on sel /\ sel_depends_only_on_core sel} /// The basis of our selector framework: Separation logic assertions enhanced with selectors /// Note that selectors are "optional", it is always possible to use a non-informative selector, /// such as fun _ -> () and to rely on the standard separation logic reasoning [@@ erasable] noeq type vprop' = { hp: slprop u#1; t:Type0; sel: selector t hp} (* Lifting the star operator to an inductive type makes normalization and implementing some later functions easier *) [@@__steel_reduce__; erasable] noeq type vprop = | VUnit : vprop' -> vprop | VStar: vprop -> vprop -> vprop (* A generic lift from slprop to vprop with a non-informative selector *) [@@ __steel_reduce__] let to_vprop' (p:slprop) = {hp = p; t = unit; sel = fun _ -> ()} [@@ __steel_reduce__] unfold let to_vprop (p:slprop) = VUnit (to_vprop' p) /// Normalization steps for norm below. 
/// All functions marked as `unfold`, or with the `__steel_reduce__` attribute will be reduced, /// as well as some functions internal to the selector framework unfold let normal_steps = [delta_attr [`%__steel_reduce__; `%__inner_steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify] /// The core normalization primitive used to simplify Verification Conditions before encoding /// them to an SMT solver. unfold let normal (#a:Type) (x:a) = norm normal_steps x /// An abbreviation for the VStar constructor, allowing to use it with infix notation [@@ __steel_reduce__; __reduce__] let star = VStar /// Extracting the underlying separation logic assertion from a vprop [@@ __steel_reduce__] let rec hp_of (p:vprop) = match p with | VUnit p -> p.hp | VStar p1 p2 -> hp_of p1 `Mem.star` hp_of p2 /// Extracting the selector type from a vprop [@@ __steel_reduce__] let rec t_of (p:vprop) = match p with | VUnit p -> p.t | VStar p1 p2 -> t_of p1 * t_of p2 /// Extracting the selector from a vprop [@@ __steel_reduce__] let rec sel_of (p:vprop) : GTot (selector (t_of p) (hp_of p)) = match p with | VUnit p -> fun h -> p.sel h | VStar p1 p2 -> let sel1 = sel_of p1 in let sel2 = sel_of p2 in fun h -> (sel1 h, sel2 h) /// Type abbreviations for separation logic pre- and postconditions of the Steel effects type pre_t = vprop type post_t (a:Type) = a -> vprop /// An annotation to indicate which separation logic predicates correspond to monadic computations /// These computations are handled in a specific manner in the framing tactic; they correspond to places where /// the context shrinks from all local variables in the computation to variables available at the toplevel let return_pre (p:vprop) : vprop = p noextract let hmem (p:vprop) = hmem (hp_of p) /// Abstract predicate for vprop implication. 
Currently implemented as an implication on the underlying slprop val can_be_split (p q:pre_t) : Type0 /// Exposing the implementation of `can_be_split` when needed for proof purposes val reveal_can_be_split (_:unit) : Lemma (forall p q. can_be_split p q == Mem.slimp (hp_of p) (hp_of q)) /// A targeted version of the above val can_be_split_interp (r r':vprop) (h:hmem r) : Lemma (requires can_be_split r r') (ensures interp (hp_of r') h) /// A dependent version of can_be_split, to be applied to dependent postconditions let can_be_split_forall (#a:Type) (p q:post_t a) = forall x. can_be_split (p x) (q x) /// A version of can_be_split which is indexed by a proposition, which can be used for equalities abduction let can_be_split_dep (p:prop) (t1 t2:pre_t) = p ==> can_be_split t1 t2 /// A dependent version of the above predicate let can_be_split_forall_dep (#a:Type) (p:a -> prop) (t1 t2:post_t a) = forall (x:a). p x ==> can_be_split (t1 x) (t2 x) (* Some lemmas about the can_be_split* predicates, to be used as rewriting rules for the abstract predicates *) val can_be_split_trans (p q r:vprop) : Lemma (requires p `can_be_split` q /\ q `can_be_split` r) (ensures p `can_be_split` r) let can_be_split_trans_rev (p q r:vprop) : Lemma (requires q `can_be_split` r /\ p `can_be_split` q) (ensures p `can_be_split` r) = can_be_split_trans p q r val can_be_split_star_l (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` p) [SMTPat ((p `star` q) `can_be_split` p)] val can_be_split_star_r (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` q) [SMTPat ((p `star` q) `can_be_split` q)] val can_be_split_refl (p:vprop) : Lemma (p `can_be_split` p) [SMTPat (p `can_be_split` p)] val can_be_split_congr_l (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((p `star` r) `can_be_split` (q `star` r))) val can_be_split_congr_r (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((r `star` p) `can_be_split` (r `star` q))) let prop_and (p1 p2: prop) : Tot prop = p1 
/\ p2 let can_be_split_forall_dep_trans_rev (#a: Type) (cond1 cond2: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond2 q r /\ can_be_split_forall_dep cond1 p q)) (ensures (can_be_split_forall_dep (fun x -> cond1 x `prop_and` cond2 x) p r)) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_trans x y) z) let can_be_split_forall_dep_congr_l (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> p x `star` r x) (fun x -> q x `star` r x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_l x y) z) let can_be_split_forall_dep_congr_r (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> r x `star` p x) (fun x -> r x `star` q x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_r x y) z) /// To simplify the implementation of the framing tactic, dependent equivalence /// is defined as a double dependent implication let equiv_forall (#a:Type) (t1 t2:post_t a) : Type0 = t1 `can_be_split_forall` t2 /\ t2 `can_be_split_forall` t1 /// This equivalence models a context restriction at the end of a Steel computation; /// note that t2 does not depend on the value of type `a`, but the two vprops must be /// equivalent let can_be_split_post (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) = forall (x:a). equiv_forall (t1 x) t2 /// Lifting the equivalence relation to vprops. Two vprops are equivalent if the underlying slprops /// are equivalent val equiv (p q:vprop) : prop /// Revealing the definition of vprop equivalence when needed for proof purposes. 
/// In other cases, the predicate is abstract val reveal_equiv (p q:vprop) : Lemma (p `equiv` q <==> hp_of p `Mem.equiv` hp_of q) (* A restricted view of the heap, that only allows to access selectors of the current slprop *) let rmem' (pre:vprop) = FExt.restricted_g_t (r0:vprop{can_be_split pre r0}) (fun r0 -> normal (t_of r0)) /// Ensuring that rmems encapsulate the structure induced by the separation logic star val valid_rmem (#frame:vprop) (h:rmem' frame) : prop unfold let rmem (pre:vprop) = h:rmem' pre{valid_rmem h} /// Exposing the definition of mk_rmem to better normalize Steel VCs unfold noextract let unrestricted_mk_rmem (r:vprop) (h:hmem r) = fun (r0:vprop{r `can_be_split` r0}) -> can_be_split_interp r r0 h; sel_of r0 h [@@ __inner_steel_reduce__] noextract let mk_rmem' (r:vprop) (h:hmem r) : Tot (rmem' r) = FExt.on_dom_g (r0:vprop{r `can_be_split` r0}) (unrestricted_mk_rmem r h) val lemma_valid_mk_rmem (r:vprop) (h:hmem r) : Lemma (valid_rmem (mk_rmem' r h)) [@@ __inner_steel_reduce__] noextract let mk_rmem (r:vprop) (h:hmem r) : Tot (rmem r) = lemma_valid_mk_rmem r h; mk_rmem' r h val reveal_mk_rmem (r:vprop) (h:hmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (ensures reveal_can_be_split(); (mk_rmem r h) r0 == sel_of r0 h) (* Logical pre and postconditions can only access the restricted view of the heap *) type req_t (pre:pre_t) = rmem pre -> Type0 type ens_t (pre:pre_t) (a:Type) (post:post_t a) = rmem pre -> (x:a) -> rmem (post x) -> Type0 (* Empty assertion *) val emp : vprop /// When needed for proof purposes, the empty assertion is a direct lift of the /// empty assertion from Steel.Memory val reveal_emp (_:unit) : Lemma (hp_of emp == Mem.emp /\ t_of emp == unit) /// Lifting pure predicates to vprop [@@__steel_reduce__] unfold let pure (p:prop) = to_vprop (pure p) /// Framing predicates for the Steel effect. 
If the current computation has already /// been framed, then the additional frame is the empty predicate let maybe_emp (framed:bool) (frame:pre_t) = if framed then frame == emp else True /// Dependent version of the above predicate, usable in dependent postconditions let maybe_emp_dep (#a:Type) (framed:bool) (frame:post_t a) = if framed then (forall x. frame x == emp) else True (* focus_rmem is an additional restriction of our view of memory. We expose it here to be able to reduce through normalization; Any valid application of focus_rmem h will be reduced to the application of h *) [@@ __steel_reduce__] unfold let unrestricted_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) = fun (r':vprop{can_be_split r0 r'}) -> can_be_split_trans r r0 r'; h r' [@@ __inner_steel_reduce__] let focus_rmem' (#r: vprop) (h: rmem r) (r0: vprop{r `can_be_split` r0}) : Tot (rmem' r0) = FExt.on_dom_g (r':vprop{can_be_split r0 r'}) (unrestricted_focus_rmem h r0) val lemma_valid_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (valid_rmem (focus_rmem' h r0)) [@@ __inner_steel_reduce__] let focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Tot (rmem r0) = lemma_valid_focus_rmem h r0; focus_rmem' h r0 /// Exposing that calling focus_rmem on the current context corresponds to an equality let focus_rmem_refl (r:vprop) (h:rmem r) : Lemma (focus_rmem #r h r == h) = FStar.FunctionalExtensionality.extensionality_g _ _ (focus_rmem #r h r) h open FStar.Tactics.V2 /// State that all "atomic" subresources have the same selectors on both views. 
/// The predicate has the __steel_reduce__ attribute, ensuring that VC normalization /// will reduce it to a conjunction of equalities on atomic subresources /// This predicate is also marked as `strict_on_arguments` on [frame], ensuring that /// it will not be reduced when the frame is symbolic /// Instead, the predicate will be rewritten to an equality using `lemma_frame_equalities` below [@@ __steel_reduce__; strict_on_arguments [0]] let rec frame_equalities' (frame:vprop) (h0:rmem frame) (h1:rmem frame) : Type0 = begin match frame with | VUnit p -> h0 frame == h1 frame | VStar p1 p2 -> can_be_split_star_l p1 p2; can_be_split_star_r p1 p2; let h01 = focus_rmem h0 p1 in let h11 = focus_rmem h1 p1 in let h02 = focus_rmem h0 p2 in let h12 = focus_rmem h1 p2 in frame_equalities' p1 h01 h11 /\ frame_equalities' p2 h02 h12 end /// This lemma states that frame_equalities is the same as an equality on the top-level frame. /// The uncommon formulation with an extra [p] is needed to use in `rewrite_with_tactic`, /// where the goal is of the shape `frame_equalities frame h0 h1 == ?u` /// The rewriting happens below, in `frame_vc_norm` val lemma_frame_equalities (frame:vprop) (h0:rmem frame) (h1:rmem frame) (p:Type0) : Lemma (requires (h0 frame == h1 frame) == p) (ensures frame_equalities' frame h0 h1 == p) /// A special case for frames about emp. val lemma_frame_emp (h0:rmem emp) (h1:rmem emp) (p:Type0) : Lemma (requires True == p) (ensures frame_equalities' emp h0 h1 == p) /// A variant of conjunction elimination, suitable to the equality goals during rewriting val elim_conjunction (p1 p1' p2 p2':Type0) : Lemma (requires p1 == p1' /\ p2 == p2') (ensures (p1 /\ p2) == (p1' /\ p2')) /// Normalization and rewriting step for generating frame equalities. /// The frame_equalities function has the strict_on_arguments attribute on the [frame], /// ensuring that it is not reduced when the frame is symbolic. 
/// When that happens, we want to replace frame_equalities by an equality on the frame,
/// mimicking reduction
[@@plugin]
let frame_vc_norm () : Tac unit = with_compat_pre_core 0 (fun _ ->
  // Do not normalize mk_rmem/focus_rmem to simplify application of
  // the reflexivity lemma on frame_equalities'
  norm [delta_attr [`%__steel_reduce__];
    delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel;
      `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult;
      `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit];
    delta_qualifier ["unfold"];
    iota;zeta;primops; simplify];

  // After reduction, the term to rewrite might be of the shape
  // (frame_equalities' ... /\ frame_equalities' .. /\ ...) == ?u,
  // with some frame_equalities' possibly already fully reduced
  // We repeatedly split the clause and extract the term on the left
  // to generate equalities on atomic subresources
  ignore (repeat (fun _ ->
    // Try to split the conjunction. If there is no conjunction, we exit the repeat
    apply_lemma (`elim_conjunction);
    // Dismiss the two uvars created for the RHS, they'll be solved by unification
    dismiss ();
    dismiss ();
    // The first goal is the left conjunction
    split ();
    // Removes the frame equality if it is about emp
    or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ());
    // Rewrites the frame_equalities if it wasn't yet reduced
    or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ());
    norm normal_steps;
    // Finally solve the uvar, finishing the rewriting for this clause
    trefl ()));

  // Removes the frame equality if it is about emp
  or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ());
  // We do not have conjunctions anymore, we try to apply the frame_equalities rewriting
  // If it fails, the frame was not symbolic, so there is nothing to do
  or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ());
  norm normal_steps;
  trefl ())

// User-facing frame equality: rewritten by frame_vc_norm during VC generation
[@@ __steel_reduce__]
unfold
let frame_equalities
  (frame:vprop)
  (h0:rmem frame) (h1:rmem frame) : prop
  = rewrite_with_tactic frame_vc_norm (frame_equalities' frame h0 h1)

/// More lemmas about the abstract can_be_split predicates, to be used as
/// rewriting rules in the tactic below
val can_be_split_dep_refl (p:vprop)
  : Lemma (can_be_split_dep true_p p p)

val equiv_can_be_split (p1 p2:vprop)
  : Lemma (requires p1 `equiv` p2)
          (ensures p1 `can_be_split` p2)

val intro_can_be_split_frame (p q:vprop) (frame:vprop)
  : Lemma (requires q `equiv` (p `star` frame))
          (ensures can_be_split q p /\ True)

val can_be_split_post_elim (#a #b:Type) (t1:a -> post_t b) (t2:post_t b)
  : Lemma (requires (forall (x:a) (y:b). t1 x y `equiv` t2 y))
          (ensures t1 `can_be_split_post` t2)

val equiv_forall_refl (#a:Type) (t:post_t a)
  : Lemma (t `equiv_forall` t)

val equiv_forall_elim (#a:Type) (t1 t2:post_t a)
  : Lemma (requires (forall (x:a). t1 x `equiv` t2 x))
          (ensures t1 `equiv_forall` t2)

open FStar.Tactics.CanonCommMonoidSimple.Equiv

(* equiv is an equivalence relation on vprops *)

/// Lemmas establishing the equivalence properties on equiv
val equiv_refl (x:vprop)
  : Lemma (equiv x x)

val equiv_sym (x y:vprop)
  : Lemma (requires equiv x y)
          (ensures equiv y x)

val equiv_trans (x y z:vprop)
  : Lemma (requires equiv x y /\ equiv y z)
          (ensures equiv x z)

module CE = FStar.Algebra.CommMonoid.Equiv

/// Equiv is an equivalence relation for vprops elements
inline_for_extraction noextract let req : CE.equiv vprop =
  CE.EQ equiv
     equiv_refl
     equiv_sym
     equiv_trans

(* Star induces a commutative monoid for the equiv equivalence relation *)

/// Lemmas establishing the commutative monoid properties
val cm_identity (x:vprop)
  : Lemma ((emp `star` x) `equiv` x)

val star_commutative (p1 p2:vprop)
  : Lemma ((p1 `star` p2) `equiv` (p2 `star` p1))

val star_associative (p1 p2 p3:vprop)
  : Lemma (((p1 `star` p2) `star` p3) `equiv` (p1 `star` (p2 `star` p3)))

val star_congruence (p1 p2 p3 p4:vprop)
  : Lemma (requires p1 `equiv` p3 /\ p2 `equiv` p4)
          (ensures (p1 `star` p2) `equiv` (p3 `star` p4))

/// Star induces a commutative monoid on vprops
[@__steel_reduce__]
inline_for_extraction noextract let rm : CE.cm vprop req =
  CE.CM emp
     star
     cm_identity
     star_associative
     star_commutative
     star_congruence

(*** Vprop combinators ***)

(* Refining a vprop with a selector predicate *)

/// Separation logic predicate stating the validity of a vprop with an additional refinement on its selector
val vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (slprop u#1)

/// Exposing the validity of the above predicate when needed for proof purposes
val interp_vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: mem)
  : Lemma (interp (vrefine_hp v p) m <==> (interp (hp_of v) m /\ p (sel_of v m)))

/// Selector type for a refined vprop
[@__steel_reduce__]
let vrefine_t (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot Type
  = (x: t_of v {p x})

/// Selector of a refined vprop. Returns a value which satisfies the refinement predicate
val vrefine_sel (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (selector (vrefine_t v p) (vrefine_hp v p))

/// Exposing the definition of the refined selector
val vrefine_sel_eq (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: Mem.hmem (vrefine_hp v p))
  : Lemma (
      interp (hp_of v) m /\
      vrefine_sel v p m == sel_of v m
    )
//  [SMTPat ((vrefine_sel v p) m)] // FIXME: this pattern causes Z3 "wrong number of argument" errors

/// Combining the above pieces to define a vprop refined by a selector predicate
[@__steel_reduce__]
let vrefine' (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot vprop' = {
  hp = vrefine_hp v p;
  t = vrefine_t v p;
  sel = vrefine_sel v p;
}

[@__steel_reduce__]
let vrefine (v: vprop) (p: (normal (t_of v) -> Tot prop)) = VUnit (vrefine' v p)

(* Dependent star for vprops *)

/// Separation logic predicate corresponding to a dependent star,
/// where the second predicate depends on the selector value of the first
val vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (slprop u#1)

/// Exposing the validity
/// of the above predicate when needed for proof purposes
val interp_vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: mem)
  : Lemma (interp (vdep_hp v p) m <==> (interp (hp_of v) m /\ interp (hp_of v `Mem.star` hp_of (p (sel_of v m))) m))

/// Helper to define the selector type of the second component of the dependent star
let vdep_payload
  (v: vprop) (p: ( (t_of v) -> Tot vprop))
  (x: t_of v)
: Tot Type
= t_of (p x)

/// Selector type for the dependent star: A dependent tuple, where the second component's type depends on the first vprop
let vdep_t (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot Type
= dtuple2 (t_of v) (vdep_payload v p)

/// Selector for the dependent star
val vdep_sel (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (selector (vdep_t v p) (vdep_hp v p))

/// Exposing the definition of the dependent star's selector when needed for proof purposes
val vdep_sel_eq (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: Mem.hmem (vdep_hp v p))
  : Lemma (
      interp (hp_of v) m /\
      begin let x = sel_of v m in
        interp (hp_of (p x)) m /\
        vdep_sel v p m == (| x, sel_of (p x) m |)
      end
    )

/// Combining the elements above to create a dependent star vprop
[@__steel_reduce__]
let vdep' (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot vprop' = {
  hp = vdep_hp v p;
  t = vdep_t v p;
  sel = vdep_sel v p;
}

[@__steel_reduce__]
let vdep (v: vprop) (p: ( (t_of v) -> Tot vprop)) = VUnit (vdep' v p)

(* Selector rewrite combinator *)

/// The selector of a rewrite combinator applies a function `f` to the current selector of a vprop.
val vrewrite_sel (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot (selector t (normal (hp_of v)))

/// Exposing the definition of the above selector
val vrewrite_sel_eq (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) (h: Mem.hmem (normal (hp_of v)))
  : Lemma ((vrewrite_sel v f <: selector' _ _) h == f ((normal (sel_of v) <: selector' _ _) h))
//  [SMTPat (vrewrite_sel v f h)] // FIXME: this pattern causes Z3 "wrong number of argument" errors

/// Combining the above elements to create a rewrite vprop
[@__steel_reduce__]
let vrewrite' (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop' = {
  hp = normal (hp_of v);
  t = t;
  sel = vrewrite_sel v f;
}

[@__steel_reduce__]
let vrewrite (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop = VUnit (vrewrite' v f)

(*** Framing tactic ***)

(* Specialize visit_tm from the standard F* tactic library to reimplement name_appears_in.
   AF: As of Jan 14, 2021, calling name_appears_in from FStar.Tactics.Derived leads to a segfault *)

exception Appears

// Apply the callback [f] to the sort of the binder [b]
let on_sort_binder (f : term -> Tac unit) (b:binder) : Tac unit =
  f b.sort

// Apply the callback [ff] to every subterm of [t] (bottom-up: children first, then [t] itself)
let rec visit_tm (ff : term -> Tac unit) (t : term) : Tac unit =
  let tv = inspect t in
  (match tv with
  | Tv_FVar _
  | Tv_UInst _ _
  | Tv_Var _ | Tv_BVar _ -> ()
  | Tv_Type _ -> ()
  | Tv_Const c -> ()
  | Tv_Uvar i u -> ()
  | Tv_Unsupp -> ()
  | Tv_Unknown -> ()
  | Tv_Arrow b c ->
      on_sort_binder ff b;
      visit_comp ff c
  | Tv_Abs b t ->
      let b = on_sort_binder (visit_tm ff) b in
      visit_tm ff t
  | Tv_App l (r, q) ->
       visit_tm ff l;
       visit_tm ff r
  | Tv_Refine b r ->
      on_sort_binder ff b;
      visit_tm ff r
  | Tv_Let r attrs b def t ->
      on_sort_binder ff b;
      visit_tm ff def;
      visit_tm ff t
  | Tv_Match sc _ brs ->
      visit_tm ff sc;
      iter (visit_br ff) brs
  | Tv_AscribedT e t topt _ ->
      visit_tm ff e;
      visit_tm ff t
  | Tv_AscribedC e c topt _ ->
      visit_tm ff e
  ); ff t
// Visit the body of a match branch (patterns are not visited)
and visit_br (ff : term -> Tac unit) (b:branch) : Tac unit =
  let (p, t) = b in
  visit_tm ff t
// Visit all terms occurring in a computation type
and visit_comp (ff : term -> Tac unit) (c : comp) : Tac unit =
  let cv = inspect_comp c in
  match cv with
  | C_Total ret -> visit_tm ff ret
  | C_GTotal ret -> visit_tm ff ret
  | C_Lemma pre post pats ->
      visit_tm ff pre;
      visit_tm ff post;
      visit_tm ff pats
  | C_Eff us eff res args decrs ->
      visit_tm ff res;
      iter (fun (a, q) -> visit_tm ff a) args;
      iter (visit_tm ff) decrs

/// Decides whether a top-level name [nm] syntactically
/// appears in the term [t].
let name_appears_in (nm:name) (t:term) : Tac bool =
  // Raise the Appears exception as soon as a matching fvar is found
  let ff (t : term) : Tac unit =
    match inspect t with
    | Tv_FVar fv -> if inspect_fv fv = nm then raise Appears
    | t -> ()
  in
  try ignore (visit_tm ff t); false with
  | Appears -> true
  | e -> raise e

/// Checks whether term [t] appears in term [i]
let term_appears_in (t:term) (i:term) : Tac bool =
  name_appears_in (explode_qn (term_to_string t)) i

/// We define a small language to handle arbitrary separation logic predicates.
/// Separation logic predicates are encoded as atoms for which equality is decidable,
/// here represented as integers
let atom : eqtype = int

// Space-separated rendering of a list of atoms, for debugging/error messages
let rec atoms_to_string (l:list atom) = match l with
  | [] -> ""
  | hd::tl -> string_of_int hd ^ " " ^ atoms_to_string tl

/// Reflecting the structure of our separation logic on atoms
type exp : Type =
  | Unit : exp
  | Mult : exp -> exp -> exp
  | Atom : atom -> exp

/// A map from atoms to the terms they represent.
/// The second component of the term corresponds to a default element,
/// ensuring we never raise an exception when trying to access an element in the map
let amap (a:Type) = list (atom * a) * a

/// An empty atom map: The list map is empty
let const (#a:Type) (xa:a) : amap a = ([], xa)

/// Accessing an element in the atom map
// We reimplement List.Tot.Base.assoc because we need our tactic to normalize it,
// but we don't want to normalize user-provided instances
let rec my_assoc (#key: eqtype) (#value: Type) (k: key) (dict: list (key & value)) : Pure (option value)
  (requires True)
  (ensures (fun res -> res == List.Tot.assoc k dict))
= match dict with
  | [] -> None
  | (k', v') :: q -> if k = k' then Some v' else my_assoc k q

// Look up atom [x] in the map, falling back to the default element
let select (#a:Type) (x:atom) (am:amap a) : Tot a =
  match my_assoc #atom #a x (fst am) with
  | Some a -> a
  | _ -> snd am

/// Updating the atom map. Since select finds the first element corresponding to
/// the atom in the list and we do not have any remove function,
/// we can simply append the new element at the head without removing any possible
/// previous element
let update (#a:Type) (x:atom) (xa:a) (am:amap a) : amap a =
  (x, xa)::fst am, snd am

/// Check whether the current term is an unresolved vprop unification variable.
/// This can happen if either it is a uvar, or it is an unresolved dependent
/// vprop uvar which is applied to some argument
let is_uvar (t:term) : Tac bool = match inspect t with
  | Tv_Uvar _ _ -> true
  | Tv_App _ _ ->
      let hd, args = collect_app t in
      Tv_Uvar? (inspect hd)
  | _ -> false

/// For a given term t, collect all terms in the list l with the same head symbol
let rec get_candidates (t:term) (l:list term) : Tac (list term) =
  let name, _ = collect_app t in
  match l with
  | [] -> []
  | hd::tl ->
      let n, _ = collect_app hd in
      if term_eq_old n name then (
        hd::(get_candidates t tl)
      ) else get_candidates t tl

/// Try to remove a term that is exactly matching, not just that can be unified
// Returns a flag indicating whether a removal happened, and the resulting list
let rec trivial_cancel (t:atom) (l:list atom) =
  match l with
  | [] -> false, l
  | hd::tl ->
      if hd = t then
        // These elements match, we remove them
        true, tl
      else (let b, res = trivial_cancel t tl in b, hd::res)

/// Call trivial_cancel on all elements of l1.
/// The first two lists returned are the remainders of l1 and l2.
/// The last two lists are the removed parts of l1 and l2, with
/// the additional invariant that they are equal
let rec trivial_cancels (l1 l2:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l1 with
  | [] -> [], l2, [], []
  | hd::tl ->
      let b, l2' = trivial_cancel hd l2 in
      let l1', l2', l1_del, l2_del = trivial_cancels tl l2' am in
      (if b then l1' else hd::l1'), l2',
      (if b then hd::l1_del else l1_del), (if b then hd::l2_del else l2_del)

exception Failed
exception Success

/// Helper to print the terms corresponding to the current list of atoms
let rec print_atoms (l:list atom) (am:amap term) : Tac string =
  match l with
  | [] -> ""
  | [hd] -> term_to_string (select hd am)
  | hd::tl -> term_to_string (select hd am) ^ " * " ^ print_atoms tl am

/// For a list of candidates l, count the number that can unify with t.
/// Does not try to unify with a uvar, this will be done at the very end.
/// Tries to unify with slprops with a different head symbol, it might
/// be an abbreviation
let rec try_candidates (t:atom) (l:list atom) (am:amap term) : Tac (atom * int) =
  match l with
  | [] -> t, 0
  | hd::tl ->
      if is_uvar (select hd am) then (try_candidates t tl am)
      else
        // Encapsulate unify in a try/with to ensure unification is not actually performed
        let res = try if unify (select t am) (select hd am) then raise Success else raise Failed
                  with | Success -> true | _ -> false in
        let t', n' = try_candidates t tl am in
        if res && hd <> t' then hd, 1 + n' else t', n'

/// Remove the given term from the list. Only to be called when
/// try_candidates succeeded
let rec remove_from_list (t:atom) (l:list atom) : Tac (list atom) =
  match l with
  | [] -> fail "atom in remove_from_list not found: should not happen"; []
  | hd::tl -> if t = hd then tl else hd::remove_from_list t tl

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration
let rec equivalent_lists_once (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l1 with
  | [] -> [], l2, l1_del, l2_del
  | hd::tl ->
    let t, n = try_candidates hd l2 am in
    if n = 1 then (
      // Exactly one candidate: commit to the match and recurse on the rest
      let l2 = remove_from_list t l2 in
      equivalent_lists_once tl l2 (hd::l1_del) (t::l2_del) am
    ) else (
      // Either too many candidates for this scrutinee, or no candidate but the uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once tl l2 l1_del l2_del am in
      hd::rem1, rem2, l1'_del, l2'_del
    )

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates by iterating on l2.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration
/// This is very close to equivalent_lists_once above, but helps making progress
/// when l1 contains syntactically equal candidates
let rec equivalent_lists_once_l2 (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l2 with
  | [] -> l1, [], l1_del, l2_del
  | hd::tl ->
    if is_uvar (select hd am) then
      // We do not try to match the vprop uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
      rem1, hd::rem2, l1'_del, l2'_del
    else (
      let t, n = try_candidates hd l1 am in
      if n = 1 then (
        // Exactly one candidate in l1: commit to the match
        let l1 = remove_from_list t l1 in
        equivalent_lists_once_l2 l1 tl (t::l1_del) (hd::l2_del) am
      ) else (
        // Either too many candidates for this scrutinee, or no candidate but the uvar
        let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
        rem1, hd::rem2, l1'_del, l2'_del
      )
    )

// First element of the list, or the unit term `() if the list is empty
let get_head (l:list atom) (am:amap term) : term = match l with
  | [] -> `()
  | hd::_ -> select hd am

/// Checks whether the list of atoms [l] only contains one unresolved uvar
let is_only_uvar (l:list atom) (am:amap term) : Tac bool =
  if List.Tot.Base.length l = 1 then is_uvar (select (List.Tot.Base.hd l) am)
  else false

/// Assumes that u is a uvar, checks that all variables in l can be unified with it.
/// Later in the tactic, the uvar will be unified to a star of l
let rec try_unifying_remaining (l:list atom) (u:term) (am:amap term) : Tac unit =
  match l with
  | [] -> ()
  | hd::tl ->
      // Encapsulate unify in a try/with so that an actual unification failure
      // is turned into a user-facing tactic failure
      try if unify u (select hd am) then raise Success else raise Failed with
      | Success -> try_unifying_remaining tl u am
      | _ -> fail ("could not find candidate for scrutinee " ^ term_to_string (select hd am))

/// Is SMT rewriting enabled for this binder
// A binder opts into SMT rewriting by carrying the smt_fallback attribute
let is_smt_binder (b:binder) : Tac bool =
  let l = b.attrs in
  not (List.Tot.isEmpty (filter (fun t -> is_fvar t (`%smt_fallback)) l))

/// Creates a new term, where all arguments where SMT rewriting is enabled have been replaced
/// by fresh, unconstrained unification variables
let rec new_args_for_smt_attrs (env:env) (l:list argv) (ty:typ) : Tac (list argv * list term) =
  let fresh_ghost_uvar ty =
    let e = cur_env () in
    ghost_uvar_env e ty
  in
  match l, inspect_unascribe ty with
  | (arg, aqualv)::tl, Tv_Arrow binder comp ->
    let needs_smt = is_smt_binder binder in
    let new_hd =
      if needs_smt then (
        // Replace this argument by a fresh ghost uvar of the same type
        let arg_ty = tc env arg in
        let uvar = fresh_ghost_uvar arg_ty in
        unshelve uvar;
        flip ();
        (uvar, aqualv)
      ) else (arg, aqualv)
    in
    begin
    let ty2 =
      match inspect_comp comp with
      | C_Total ty2 -> ty2
      | C_Eff _ eff_name ty2 _ _ ->
        if eff_name = ["Prims"; "Tot"] then ty2
        else fail "computation type not supported in definition of slprops"
      | _ -> fail "computation type not supported in definition of slprops"
    in
    let tl_argv, tl_terms = new_args_for_smt_attrs env tl ty2 in
    // Keep the original argument so that an SMT guard relating it to the uvar can be emitted later
    new_hd::tl_argv, (if needs_smt then arg::tl_terms else tl_terms)
    end
  | [], Tv_FVar fv -> [], []
  | _ -> fail "should not happen. Is an slprop partially applied?"
/// Rewrites all terms in the context to enable SMT rewriting through the use of fresh, unconstrained unification variables
let rewrite_term_for_smt (env:env) (am:amap term * list term) (a:atom) : Tac (amap term * list term)
  = let am, prev_uvar_terms = am in
    let term = select a am in
    let hd, args = collect_app term in
    let t = tc env hd in
    let new_args, uvar_terms = new_args_for_smt_attrs env args t in
    let new_term = mk_app hd new_args in
    update a new_term am, List.Tot.append uvar_terms prev_uvar_terms

/// User-facing error message when the framing tactic fails
let fail_atoms (#a:Type) (l1 l2:list atom) (am:amap term) : Tac a
  = fail ("could not find a solution for unifying\n" ^ print_atoms l1 am ^ "\nand\n" ^ print_atoms l2 am)

/// Variant of equivalent_lists' below to be called once terms have been rewritten to allow SMT rewriting.
/// If unification succeeds and we have unicity of the solution, this tactic will succeed,
/// and ultimately create an SMT guard that the two terms are actually equal
let rec equivalent_lists_fallback (n:nat) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false)
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can be therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then
        // Should always be smaller or equal to n
        // If it is equal, no progress was made.
        fail_atoms rem1 rem2 am
      else equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' am

/// Iterates over all terms in [l2] to prepare them for unification with SMT rewriting
let replace_smt_uvars (l1 l2:list atom) (am:amap term) : Tac (amap term * list term)
  = let env = cur_env () in
    fold_left (rewrite_term_for_smt env) (am, []) l2

/// Recursively calls equivalent_lists_once.
/// Stops when we're done with unification, or when we didn't make any progress
/// If we didn't make any progress, we have too many candidates for some terms.
/// Accumulates rewritings of l1 and l2 in l1_del and l2_del, with the invariant
/// that the two lists are unifiable at any point
/// The boolean indicates if there is a leftover empty frame
let rec equivalent_lists' (n:nat) (use_smt:bool) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool * list term) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false, [])
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can be therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true, []))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false, []
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then (
        // Try to make progress by matching non-uvars of l2 with candidates in l1
        let rem1, rem2, l1_del', l2_del' = equivalent_lists_once_l2 rem1 rem2 l1_del' l2_del' am in
        let n' = List.Tot.length rem1 in
        if n' >= n then (
          // Should always be smaller or equal to n
          // If it is equal, no progress was made.
          if use_smt then
            // SMT fallback is allowed
            let new_am, uvar_terms = replace_smt_uvars rem1 rem2 am in
            let l1_f, l2_f, b = equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' new_am in
            l1_f, l2_f, b, uvar_terms
          else fail_atoms rem1 rem2 am
        ) else equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am
      ) else equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am

/// Checks if term for atom t unifies with all uvars in l
let rec unifies_with_all_uvars (t:term) (l:list atom) (am:amap term) : Tac bool =
  match l with
  | [] -> true
  | hd::tl ->
    if unifies_with_all_uvars t tl am then (
      // Unified with tail, try this term
      let hd_t = select hd am in
      if is_uvar hd_t then (
        // The head term is a uvar, try unifying
        try if unify t hd_t then raise Success else raise Failed
        with | Success -> true | _ -> false
      ) else true // The uvar is not a head term, we do not need to try it
    ) else false

/// Puts all terms in l1 that cannot unify with the uvars in l2 at the top:
/// They need to be solved first
let rec most_restricted_at_top (l1 l2:list atom) (am:amap term) : Tac (list atom) =
  match l1 with
  | [] -> []
  | hd::tl ->
    if unifies_with_all_uvars (select hd am) l2 am
    then (most_restricted_at_top tl l2 am) `List.Tot.append` [hd]
    else hd::(most_restricted_at_top tl l2 am)

/// Core AC-unification tactic.
/// First remove all trivially equal terms, then try to decide equivalence. /// Assumes that l1 does not contain any vprop uvar. /// If it succeeds, returns permutations of l1, l2, and a boolean indicating /// if l2 has a trailing empty frame to be unified let equivalent_lists (use_smt:bool) (l1 l2:list atom) (am:amap term) : Tac (list atom * list atom * bool * list term) = let l1, l2, l1_del, l2_del = trivial_cancels l1 l2 am in let l1 = most_restricted_at_top l1 l2 am in let n = List.Tot.length l1 in let l1_del, l2_del, emp_frame, uvar_terms = equivalent_lists' n use_smt l1 l2 l1_del l2_del am in l1_del, l2_del, emp_frame, uvar_terms (* Helpers to relate the actual terms to their representation as a list of atoms *) open FStar.Reflection.V2.Derived.Lemmas let rec list_to_string (l:list term) : Tac string = match l with | [] -> "end" | hd::tl -> term_to_string hd ^ " " ^ list_to_string tl let rec mdenote_gen (#a:Type u#aa) (unit:a) (mult:a -> a -> a) (am:amap a) (e:exp) : a = match e with | Unit -> unit | Atom x -> select x am | Mult e1 e2 -> mult (mdenote_gen unit mult am e1) (mdenote_gen unit mult am e2) let rec xsdenote_gen (#a:Type) (unit:a) (mult:a -> a -> a) (am:amap a) (xs:list atom) : a = match xs with | [] -> unit | [x] -> select x am | x::xs' -> mult (select x am) (xsdenote_gen unit mult am xs') unfold let mdenote (#a:Type u#aa) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp) : a = let open FStar.Algebra.CommMonoid.Equiv in mdenote_gen (CM?.unit m) (CM?.mult m) am e unfold let xsdenote (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom) : a = let open FStar.Algebra.CommMonoid.Equiv in xsdenote_gen (CM?.unit m) (CM?.mult m) am xs // We reimplement List.Tot.Base.append because we need our tactic to normalize it, // but we don't want to normalize user-provided instances let rec my_append (#t: Type) (l1 l2: list t) : Pure (list t) (requires True) (ensures (fun res -> res == l1 `List.Tot.append` l2)) (decreases l1) = match l1 with | [] -> 
    l2
  | a :: q -> a :: my_append q l2

/// Flattens an expression into its list of atoms, erasing units and associativity
let rec flatten (e:exp) : list atom =
  match e with
  | Unit -> []
  | Atom x -> [x]
  | Mult e1 e2 -> flatten e1 `my_append` flatten e2

/// The denotation of an appended atom list is provably equivalent to the
/// product of the denotations of the two lists
let rec flatten_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a)
  (xs1 xs2:list atom)
  : Lemma (xsdenote eq m am (xs1 `my_append` xs2) `CE.EQ?.eq eq`
           CE.CM?.mult m (xsdenote eq m am xs1) (xsdenote eq m am xs2)) =
  let open FStar.Algebra.CommMonoid.Equiv in
  match xs1 with
  | [] ->
    CM?.identity m (xsdenote eq m am xs2);
    EQ?.symmetry eq (CM?.mult m (CM?.unit m) (xsdenote eq m am xs2)) (xsdenote eq m am xs2)
  | [x] -> (
    if (Nil? xs2)
    then (right_identity eq m (select x am);
          EQ?.symmetry eq (CM?.mult m (select x am) (CM?.unit m)) (select x am))
    else EQ?.reflexivity eq (CM?.mult m (xsdenote eq m am [x]) (xsdenote eq m am xs2)))
  | x::xs1' ->
    // Inductive case: reassociate via the monoid laws and chain transitivity
    flatten_correct_aux eq m am xs1' xs2;
    EQ?.reflexivity eq (select x am);
    CM?.congruence m (select x am) (xsdenote eq m am (xs1' `my_append` xs2))
                     (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2));
    CM?.associativity m (select x am) (xsdenote eq m am xs1') (xsdenote eq m am xs2);
    EQ?.symmetry eq (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1'))
                               (xsdenote eq m am xs2))
                    (CM?.mult m (select x am)
                               (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)));
    EQ?.transitivity eq
      (CM?.mult m (select x am) (xsdenote eq m am (xs1' `my_append` xs2)))
      (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)))
      (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2))

/// Soundness of flattening: an expression denotes the same value (up to [eq])
/// as its flattened list of atoms
let rec flatten_correct (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp)
  : Lemma (mdenote eq m am e `CE.EQ?.eq eq` xsdenote eq m am (flatten e)) =
  let open FStar.Algebra.CommMonoid.Equiv in
  match e with
  | Unit -> EQ?.reflexivity eq (CM?.unit m)
  | Atom x -> EQ?.reflexivity eq (select x am)
  | Mult e1 e2 ->
    flatten_correct_aux eq m am (flatten e1) (flatten e2);
    EQ?.symmetry eq (xsdenote eq m am (flatten e1 `my_append` flatten e2))
                    (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)));
    flatten_correct eq m am e1;
    flatten_correct eq m am e2;
    CM?.congruence m (mdenote eq m am e1) (mdenote eq m am e2)
                     (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2));
    EQ?.transitivity eq
      (CM?.mult m (mdenote eq m am e1) (mdenote eq m am e2))
      (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)))
      (xsdenote eq m am (flatten e1 `my_append` flatten e2))

/// Reflection lemma: to prove two expressions equivalent it suffices to prove
/// their flattened denotations equivalent
let monoid_reflect (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e1 e2:exp)
  (_ : squash (xsdenote eq m am (flatten e1) `CE.EQ?.eq eq` xsdenote eq m am (flatten e2)))
  : squash (mdenote eq m am e1 `CE.EQ?.eq eq` mdenote eq m am e2) =
  flatten_correct eq m am e1;
  flatten_correct eq m am e2;
  CE.EQ?.symmetry eq (mdenote eq m am e2) (xsdenote eq m am (flatten e2));
  CE.EQ?.transitivity eq
    (xsdenote eq m am (flatten e1))
    (xsdenote eq m am (flatten e2))
    (mdenote eq m am e2);
  CE.EQ?.transitivity eq
    (mdenote eq m am e1)
    (xsdenote eq m am (flatten e1))
    (mdenote eq m am e2)

// Here we sort the variable numbers

// We reimplement List.Tot.Base.sortWith because we need our tactic to normalize it,
// but we don't want to normalize user-provided instances
let rec my_partition (#a: Type) (f: (a -> Tot bool)) (l: list a)
  : Pure (list a & list a)
    (requires True)
    (ensures (fun res -> res == List.Tot.partition f l)) =
  match l with
  | [] -> [], []
  | hd::tl ->
    let l1, l2 = my_partition f tl in
    if f hd
    then hd::l1, l2
    else l1, hd::l2

/// Extensionality of partition: pointwise-equal predicates partition identically
let rec partition_ext (#a: Type) (f1 f2: (a -> Tot bool)) (l: list a)
  : Lemma
    (requires (forall x .
f1 x == f2 x)) (ensures (List.Tot.partition f1 l == List.Tot.partition f2 l)) = match l with | [] -> () | hd::tl -> partition_ext f1 f2 tl let my_bool_of_compare (#a: Type) (f: a -> a -> Tot int) (x: a) (y: a) : Tot bool = f x y < 0 let rec my_sortWith (#a: Type) (f: (a -> a -> Tot int)) (l:list a) : Pure (list a) (requires True) (ensures (fun res -> res == List.Tot.sortWith f l)) (decreases (List.Tot.length l)) = match l with | [] -> [] | pivot::tl -> let hi, lo = my_partition (my_bool_of_compare f pivot) tl in partition_ext (my_bool_of_compare f pivot) (List.Tot.bool_of_compare f pivot) tl; List.Tot.partition_length (List.Tot.bool_of_compare f pivot) tl; my_append (my_sortWith f lo) (pivot::my_sortWith f hi) let rec sortWith_ext (#a: Type) (f1 f2: (a -> a -> Tot int)) (l: list a) : Lemma (requires (forall x y . f1 x y == f2 x y)) (ensures (List.Tot.sortWith f1 l == List.Tot.sortWith f2 l)) (decreases (List.Tot.length l)) = match l with | [] -> () | pivot::tl -> partition_ext (List.Tot.bool_of_compare f1 pivot) (List.Tot.bool_of_compare f2 pivot) tl; List.Tot.partition_length (List.Tot.bool_of_compare f1 pivot) tl; let hi, lo = List.Tot.partition (List.Tot.bool_of_compare f1 pivot) tl in sortWith_ext f1 f2 lo; sortWith_ext f1 f2 hi let permute = list atom -> list atom let my_compare_of_bool (#a:eqtype) (rel: a -> a -> Tot bool) (x: a) (y: a) : Tot int = if x `rel` y then -1 else if x = y then 0 else 1 let sort : permute = my_sortWith #int (my_compare_of_bool (<)) #push-options "--fuel 1 --ifuel 1" let lemma_xsdenote_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (hd:atom) (tl:list atom) : Lemma (xsdenote eq m am (hd::tl) `CE.EQ?.eq eq` (CE.CM?.mult m (select hd am) (xsdenote eq m am tl))) = let open FStar.Algebra.CommMonoid.Equiv in match tl with | [] -> assert (xsdenote eq m am (hd::tl) == select hd am); CM?.identity m (select hd am); EQ?.symmetry eq (CM?.unit m `CM?.mult m` select hd am) (select hd am); CM?.commutativity m (CM?.unit m) (select hd am); 
    EQ?.transitivity eq (xsdenote eq m am (hd::tl))
                        (CM?.unit m `CM?.mult m` select hd am)
                        (CM?.mult m (select hd am) (xsdenote eq m am tl))
  | _ -> EQ?.reflexivity eq (xsdenote eq m am (hd::tl))

/// Partitioning preserves the denotation: the product of the denotations of the
/// two partitions is equivalent to the denotation of the original list
let rec partition_equiv (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a)
  (pivot:atom) (q:list atom)
  : Lemma (let open FStar.List.Tot.Base in
           let hi, lo = partition (bool_of_compare (compare_of_bool (<)) pivot) q in
           CE.EQ?.eq eq
             (xsdenote eq m am hi `CE.CM?.mult m` xsdenote eq m am lo)
             (xsdenote eq m am q)) =
  let open FStar.Algebra.CommMonoid.Equiv in
  let open FStar.List.Tot.Base in
  let f = bool_of_compare (compare_of_bool (<)) pivot in
  let hi, lo = partition f q in
  match q with
  | [] -> CM?.identity m (xsdenote eq m am hi)
  | hd::tl ->
    let l1, l2 = partition f tl in
    partition_equiv eq m am pivot tl;
    assert (EQ?.eq eq
      (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)
      (xsdenote eq m am tl));
    EQ?.reflexivity eq (xsdenote eq m am l1);
    EQ?.reflexivity eq (xsdenote eq m am l2);
    EQ?.reflexivity eq (xsdenote eq m am hi);
    EQ?.reflexivity eq (xsdenote eq m am lo);
    if f hd then begin
      // hd goes to the "hi" partition: peel it off and reassociate
      assert (hi == hd::l1 /\ lo == l2);
      lemma_xsdenote_aux eq m am hd l1;
      CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo)
                       (select hd am `CM?.mult m` xsdenote eq m am l1)
                       (xsdenote eq m am l2);
      CM?.associativity m (select hd am) (xsdenote eq m am l1) (xsdenote eq m am l2);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        ((select hd am `CM?.mult m` xsdenote eq m am l1) `CM?.mult m` xsdenote eq m am l2)
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2));
      EQ?.reflexivity eq (select hd am);
      CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)
                       (select hd am) (xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2))
        (select hd am `CM?.mult m` xsdenote eq m am tl);
      lemma_xsdenote_aux eq m am hd tl;
      EQ?.symmetry eq (xsdenote eq m am (hd::tl))
                      (select hd am `CM?.mult m` xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` xsdenote eq m am tl)
        (xsdenote eq m am (hd::tl))
    end
    else begin
      // hd goes to the "lo" partition: commute it to the front and reassociate
      assert (hi == l1 /\ lo == hd::l2);
      lemma_xsdenote_aux eq m am hd l2;
      CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo)
                       (xsdenote eq m am l1)
                       (select hd am `CM?.mult m` xsdenote eq m am l2);
      CM?.commutativity m (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (xsdenote eq m am l1 `CM?.mult m` (select hd am `CM?.mult m` xsdenote eq m am l2))
        ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1);
      CM?.associativity m (select hd am) (xsdenote eq m am l2) (xsdenote eq m am l1);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1)
        (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1));
      CM?.commutativity m (xsdenote eq m am l2) (xsdenote eq m am l1);
      EQ?.reflexivity eq (select hd am);
      CM?.congruence m (select hd am) (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1)
                       (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1))
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2));
      CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)
                       (select hd am) (xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2))
        (select hd am `CM?.mult m` xsdenote eq m am tl);
      lemma_xsdenote_aux eq m am hd tl;
      EQ?.symmetry eq (xsdenote eq m am (hd::tl))
                      (select hd am `CM?.mult m` xsdenote eq m am tl);
      EQ?.transitivity eq
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
        (select hd am `CM?.mult m` xsdenote eq m am tl)
        (xsdenote eq m am (hd::tl))
    end

/// Soundness of sorting: a list of atoms denotes the same value (up to [eq])
/// as its sorted permutation
let rec sort_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom)
  : Lemma (requires True)
          (ensures xsdenote eq m am xs `CE.EQ?.eq eq` xsdenote eq m am (sort xs))
          (decreases (FStar.List.Tot.Base.length xs)) =
  let open FStar.Algebra.CommMonoid.Equiv in
  match xs with
  | [] -> EQ?.reflexivity eq (xsdenote eq m am [])
  | pivot::q ->
    // Bridge our reimplemented sort to List.Tot.sortWith via extensionality,
    // registered as an SMT pattern for the rest of the proof
    let sort0 : permute = List.Tot.sortWith #int (List.Tot.compare_of_bool (<)) in
    let sort_eq (l: list atom) : Lemma (sort l == sort0 l) [SMTPat (sort l)] =
      sortWith_ext (my_compare_of_bool (<)) (List.Tot.compare_of_bool (<)) l
    in
    let open FStar.List.Tot.Base in
    let f:int -> int -> int = compare_of_bool (<) in
    let hi, lo = partition (bool_of_compare f pivot) q in
    flatten_correct_aux eq m am (sort lo) (pivot::sort hi);
    assert (xsdenote eq m am (sort xs) `EQ?.eq eq`
              CM?.mult m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi)));
    lemma_xsdenote_aux eq m am pivot (sort hi);
    EQ?.reflexivity eq (xsdenote eq m am (sort lo));
    CM?.congruence m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi))
                     (xsdenote eq m am (sort lo))
                     (select pivot am `CM?.mult m` xsdenote eq m am (sort hi));
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (xsdenote eq m am (sort lo) `CM?.mult m` xsdenote eq m am (pivot::sort hi))
      (xsdenote eq m am (sort lo) `CM?.mult m`
        (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (xsdenote eq m am (sort lo) `CM?.mult m`
        (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))));
    CM?.commutativity m (xsdenote eq m am (sort lo))
                        (select pivot am `CM?.mult m` xsdenote eq m am (sort hi));
    CM?.associativity m (select pivot am) (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo));
    EQ?.transitivity eq
      (xsdenote eq m am (sort lo) `CM?.mult m`
        (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)))
      ((select pivot am `CM?.mult m` xsdenote eq m am (sort hi)) `CM?.mult m`
        xsdenote eq m am (sort lo))
      (select pivot am `CM?.mult m`
        (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)));
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (xsdenote eq m am (sort lo) `CM?.mult m`
        (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)))
      (select pivot am `CM?.mult m`
        (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m`
        (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))));
    partition_length (bool_of_compare f pivot) q;
    sort_correct_aux eq m am hi;
    sort_correct_aux eq m am lo;
    EQ?.symmetry eq (xsdenote eq m am lo) (xsdenote eq m am (sort lo));
    EQ?.symmetry eq (xsdenote eq m am hi) (xsdenote eq m am (sort hi));
    CM?.congruence m (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo))
                     (xsdenote eq m am hi) (xsdenote eq m am lo);
    assert (EQ?.eq eq
      (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))
      (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo));
    EQ?.reflexivity eq (select pivot am);
    CM?.congruence m (select pivot am)
                     (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))
                     (select pivot am)
                     (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo);
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m`
        (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)))
      (select pivot am `CM?.mult m`
        (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)));
    partition_equiv eq m am pivot q;
    CM?.congruence m (select pivot am)
                     (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)
                     (select pivot am)
                     (xsdenote eq m am q);
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo))
      (select pivot am `CM?.mult m` (xsdenote eq m am q));
    assert (EQ?.eq eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am q)));
    lemma_xsdenote_aux eq m am pivot q;
    EQ?.symmetry eq (xsdenote eq m am (pivot::q))
                    (select pivot am `CM?.mult m` (xsdenote eq m am q));
    EQ?.transitivity eq
      (xsdenote eq m am (sort xs))
      (select pivot am `CM?.mult m` (xsdenote eq m am q))
      (xsdenote eq m am xs);
    EQ?.symmetry eq (xsdenote eq m am (sort xs)) (xsdenote eq m am xs)
#pop-options

#push-options "--fuel 0 --ifuel 0"
(* Lemmas to be called after a permutation compatible with AC-unification was found *)

/// Lifts a propositional equality into the abstract equivalence [eq]
let smt_reflexivity (#a:Type) (eq:CE.equiv a) (x y:a)
  : Lemma (requires x == y)
          (ensures CE.EQ?.eq eq x y)
  = CE.EQ?.reflexivity eq x

/// Like [identity_left], but starting from a propositional equality x == y
let identity_left_smt (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a)
  : Lemma
    (requires x == y)
    (ensures CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) y))
  = CE.CM?.identity m x;
    CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x

/// x is equivalent to unit `mult` x
let identity_left (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x:a)
  : Lemma (CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) x))
  = CE.CM?.identity m x;
    CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x

/// If x is equivalent to y, then x `mult` unit is equivalent to y
let identity_right_diff (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a)
  : Lemma
    (requires CE.EQ?.eq eq x y)
    (ensures CE.EQ?.eq eq (CE.CM?.mult m x (CE.CM?.unit m)) y)
  = CE.right_identity eq m x;
    CE.EQ?.transitivity eq (CE.CM?.mult m x (CE.CM?.unit m)) x y

/// Dismiss possible vprops goals that might have been created by lemma application.
/// These vprops will be instantiated at a later stage; else, Meta-F* will raise an error let rec dismiss_slprops () : Tac unit = match term_as_formula' (cur_goal ()) with | App t _ -> if is_fvar t (`%squash) then () else (dismiss(); dismiss_slprops ()) | _ -> dismiss(); dismiss_slprops () /// Recursively removing trailing empty assertions let rec n_identity_left (n:int) (eq m:term) : Tac unit = if n = 0 then ( apply_lemma (`(CE.EQ?.reflexivity (`#eq))); // Cleaning up, in case a uvar has been generated here. It'll be solved later set_goals []) else ( apply_lemma (`identity_right_diff (`#eq) (`#m)); // Drop the slprops generated, they will be solved later dismiss_slprops (); n_identity_left (n-1) eq m ) /// Helper lemma: If two vprops (as represented by lists of atoms) are equivalent, then their canonical forms /// (corresponding to applying the sort function on atoms) are equivalent let equivalent_sorted (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (l1 l2 l1' l2':list atom) : Lemma (requires sort l1 == sort l1' /\ sort l2 == sort l2' /\ xsdenote eq m am l1 `CE.EQ?.eq eq` xsdenote eq m am l2) (ensures xsdenote eq m am l1' `CE.EQ?.eq eq` xsdenote eq m am l2') = let open FStar.Algebra.CommMonoid.Equiv in sort_correct_aux eq m am l1'; sort_correct_aux eq m am l1; EQ?.symmetry eq (xsdenote eq m am l1) (xsdenote eq m am (sort l1)); EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am (sort l1')) (xsdenote eq m am l1); EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am l1) (xsdenote eq m am l2); sort_correct_aux eq m am l2; EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am l2) (xsdenote eq m am (sort l2)); sort_correct_aux eq m am l2'; EQ?.symmetry eq (xsdenote eq m am l2') (xsdenote eq m am (sort l2')); EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am (sort l2)) (xsdenote eq m am l2') #pop-options /// Finds the position of first occurrence of x in xs. /// This is now specialized to terms and their funny term_eq_old. 
let rec where_aux (n:nat) (x:term) (xs:list term)
  : Tac (option nat) (decreases xs) =
  match xs with
  | [] -> None
  | x'::xs' -> if term_eq_old x x' then Some n else where_aux (n+1) x xs'

/// Position of the first occurrence of a term in a list, starting from index 0
let where = where_aux 0

/// Turns an atomic term [t] into an atom: reuses the existing atom if [t] is
/// already in [ts], otherwise allocates a fresh atom and extends [ts] and [am]
let fatom (t:term) (ts:list term) (am:amap term) : Tac (exp * list term * amap term) =
  match where t ts with
  | Some v -> (Atom v, ts, am)
  | None ->
    let vfresh = List.Tot.Base.length ts in
    let t = norm_term [iota; zeta] t in
    (Atom vfresh, ts `List.Tot.append` [t], update vfresh t am)

/// Transforming a term into the corresponding list of atoms
/// If the atomic terms were already present in the map [am], then
/// they correspond to the same atoms
/// This expects that mult, unit, and t have already been normalized
let rec reification_aux (ts:list term) (am:amap term) (mult unit t : term)
  : Tac (exp * list term * amap term) =
  let hd, tl = collect_app_ref t in
  match inspect_unascribe hd, List.Tot.Base.list_unref tl with
  | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] ->
    // A binary application of the monoid multiplication is reified as Mult;
    // anything else is treated as an atom
    if term_eq_old (pack (Tv_FVar fv)) mult
    then (let (e1, ts, am) = reification_aux ts am mult unit t1 in
          let (e2, ts, am) = reification_aux ts am mult unit t2 in
          (Mult e1 e2, ts, am))
    else fatom t ts am
  | _, _ ->
    if term_eq_old t unit
    then (Unit, ts, am)
    else fatom t ts am

/// Performs the required normalization before calling the function above
let reification (eq: term) (m: term) (ts:list term) (am:amap term) (t:term)
  : Tac (exp * list term * amap term) =
  let mult = norm_term [iota; zeta; delta] (`CE.CM?.mult (`#m)) in
  let unit = norm_term [iota; zeta; delta] (`CE.CM?.unit (`#m)) in
  let t = norm_term [iota; zeta] t in
  reification_aux ts am mult unit t

/// Meta-F* internal: Transforms the atom map into a term
let rec convert_map (m : list (atom * term)) : term =
  match m with
  | [] -> `[]
  | (a, t)::ps ->
    let a = pack (Tv_Const (C_Int a)) in
    (* let t = norm_term [delta] t in *)
    `((`#a, (`#t)) :: (`#(convert_map ps)))

/// `am` is an amap (basically a list) of terms, each representing a value
/// of type `a` (whichever we are canonicalizing). This functions converts
/// `am` into a single `term` of type `amap a`, suitable to call `mdenote` with *)
let convert_am (am : amap term) : term =
  let (map, def) = am in
  (* let def = norm_term [delta] def in *)
  `( (`#(convert_map map), `#def) )

/// Quotes an expression of the atoms language back into a term
let rec quote_exp (e:exp) : term =
  match e with
  | Unit -> (`Unit)
  | Mult e1 e2 -> (`Mult (`#(quote_exp e1)) (`#(quote_exp e2)))
  | Atom n ->
    let nt = pack (Tv_Const (C_Int n)) in
    (`Atom (`#nt))

/// Quotes a list of atoms back into a term
let rec quote_atoms (l:list atom) =
  match l with
  | [] -> `[]
  | hd::tl ->
    let nt = pack (Tv_Const (C_Int hd)) in
    (`Cons (`#nt) (`#(quote_atoms tl)))

/// Some internal normalization steps to make reflection of vprops into atoms and atom permutation go smoothly.
/// We reimplemented sorting/list functions to normalize our uses without normalizing those introduced by the user.
let normal_tac_steps = [primops; iota; zeta; delta_only [
  `%mdenote; `%select; `%my_assoc; `%my_append;
  `%flatten; `%sort;
  `%my_sortWith; `%my_partition;
  `%my_bool_of_compare; `%my_compare_of_bool;
  `%fst; `%__proj__Mktuple2__item___1;
  `%snd; `%__proj__Mktuple2__item___2;
  `%CE.__proj__CM__item__unit;
  `%CE.__proj__CM__item__mult;
  `%rm]]

/// The normalization function, using the above normalization steps
let normal_tac (#a:Type) (x:a) : a = FStar.Pervasives.norm normal_tac_steps x

/// Helper lemma to establish relation between normalized and initial values
let normal_elim (x:Type0) : Lemma
  (requires x)
  (ensures normal_tac x)
= ()

// Carries the AC-unification result out of the try/with in canon_l_r
exception Result of list atom * list atom * bool * list term

/// F* equalities are typed, but the generated type sometimes is a unification variable.
/// This helper ensures that such unification variables are not left unresolved, which would lead to an error
let close_equality_typ' (t:term) : Tac unit =
  let f = term_as_formula' t in
  match f with
  | Comp (Eq (Some u)) l _ ->
    // If the equality's type annotation is still a uvar, solve it with the type of the lhs
    if is_uvar u then (unshelve u; exact_with_ref (tc (cur_env()) l))
  | _ -> ()

/// Recursively closing equality types in a given term (usually a unification constraint)
let close_equality_typ (t:term) : Tac unit =
  visit_tm close_equality_typ' t

/// Core unification tactic.
/// Transforms terms into their atom representations,
/// Tries to find a solution to AC-unification, and if so,
/// soundly permutes the atom representations before calling the unifier
/// to check the validity of the provided solution.
/// In the case where SMT rewriting was needed, equalities abduction is performed by instantiating the
/// abduction prop unification variable with the corresponding guard

/// 09/24:
///
/// The tactic internally builds a map from atoms to terms
/// and uses the map for reflecting the goal to atoms representation
/// During reflection, the tactics engine typechecks the amap, and hence all
/// the terms again
/// This typechecking of terms is unnecessary, since the terms are coming
/// from the goal, and hence are already well-typed
/// Worse, re-typechecking them may generate a lot of SMT queries
/// And even worse, the SMT queries are discharged in the static context,
/// requiring various workarounds (e.g. squash variables for if conditions etc.)
///
/// To fix this, we now "name" the terms and use the amap with names
///
/// Read through the canon_l_r function for how we do this

/// The following three lemmas are helpers to manipulate the goal in canon_l_r

[@@ no_subtyping]
let inst_bv (#a:Type) (#p:a -> Type0) (#q:Type0) (x:a) (_:squash (p x ==> q))
  : Lemma ((forall (x:a). p x) ==> q) = ()

let modus_ponens (#p #q:Type0) (_:squash p)
  : Lemma ((p ==> q) ==> q)
  = ()

let cut (p q:Type0) : Lemma (requires p /\ (p ==> q)) (ensures q) = ()

let and_true (p: Type0) : Lemma (requires (p /\ (p ==> True))) (ensures p) = ()

let solve_implies_true (p: Type0) : Lemma (p ==> True) = ()

// This exception is raised for failures that should not be considered
// hard but should allow postponing the goal instead
exception Postpone of string

(* NOTE! Redefining boolean disjunction to *not* be short-circuiting,
   since we cannot use an effectful result as argument of Prims.op_BarBar *)
private let bor = op_BarBar

// Head-symbol tests used throughout the tactic
private let is_and (t:term) : bool =
  is_any_fvar t [`%(/\); `%prop_and]

private let is_squash (t:term) : bool =
  is_any_fvar t [`%squash; `%auto_squash]

private let is_star (t:term) : bool =
  is_any_fvar t [`%star; `%VStar]

private let is_star_or_unit (t:term) : bool =
  is_any_fvar t [`%star; `%VStar; `%VUnit]

/// Walks a conjunction of SMT props and unifies every uvar conjunct with True.
/// Postpones (rather than fails) when uvars remain that other goals may solve.
let rec unify_pr_with_true (pr: term) : Tac unit =
  let hd, tl = collect_app pr in
  if is_and hd
  then
    match tl with
    | [pr_l, _; pr_r, _] ->
      unify_pr_with_true pr_l;
      unify_pr_with_true pr_r
    | _ -> fail "unify_pr_with_true: ill-formed /\\"
  else
    match inspect_unascribe hd with
    | Tv_Uvar _ _ ->
      if unify pr (`true_p)
      then ()
      else begin
        fail "unify_pr_with_true: could not unify SMT prop with True"
      end
    | _ ->
      if List.Tot.length (free_uvars pr) = 0
      then ()
      else
        // postpone the goal instead of failing hard, to allow for other goals to solve those uvars
        raise (Postpone "unify_pr_with_true: some uvars are still there")

/// Eliminates the left conjunct of a squashed /\ to establish [goal]
let elim_and_l_squash (#a #b: Type0) (#goal: Type0)
  (f: (a -> Tot (squash goal))) (h: (a /\ b))
  : Tot (squash goal) =
  let f' (x: squash a) : Tot (squash goal) = FStar.Squash.bind_squash x f in
  let elim_impl (x: squash (a /\ b)) : Tot (squash a) = () in
  f' (elim_impl (FStar.Squash.return_squash h))

/// Eliminates the right conjunct of a squashed /\ to establish [goal]
let elim_and_r_squash (#a #b: Type0) (#goal: Type0)
  (f: (b -> Tot (squash goal))) (h: (a /\ b))
  : Tot
    (squash goal) =
  let f' (x: squash b) : Tot (squash goal) = FStar.Squash.bind_squash x f in
  let elim_impl (x: squash (a /\ b)) : Tot (squash b) = () in
  f' (elim_impl (FStar.Squash.return_squash h))

/// Squash introduction, curried so it can be partially applied by mk_app below
let _return_squash (#a: Type) () (x: a) : Tot (squash a) =
  FStar.Squash.return_squash x

/// Builds the term that instantiates the abduction variable: walks the
/// conjunction [pr] and selects the uvar side with elim_and_{l,r}_squash
let rec set_abduction_variable_term (pr: term) : Tac term =
  let hd, tl = collect_app pr in
  if is_and hd
  then
    match tl with
    | (pr_l, Q_Explicit) :: (pr_r, Q_Explicit) :: [] ->
      if List.Tot.length (free_uvars pr_r) = 0
      then
        let arg = set_abduction_variable_term pr_l in
        mk_app (`elim_and_l_squash) [arg, Q_Explicit]
      else if List.Tot.length (free_uvars pr_l) = 0
      then
        let arg = set_abduction_variable_term pr_r in
        mk_app (`elim_and_r_squash) [arg, Q_Explicit]
      else
        // postpone the goal instead of failing hard, to allow for other goals to solve those uvars
        raise (Postpone "set_abduction_variable_term: there are still uvars on both sides of l_and")
    | _ -> fail "set_abduction_variable: ill-formed /\\"
  else
    match inspect hd with
    | Tv_Uvar _ _ ->
      mk_app (`_return_squash) [`(), Q_Explicit]
    | _ -> fail "set_abduction_variable: cannot unify"

/// Solves an arrow goal whose domain is the abduction prop, using the term
/// built by set_abduction_variable_term
let set_abduction_variable () : Tac unit =
  let g = cur_goal () in
  match inspect_unascribe g with
  | Tv_Arrow b _ ->
    let pr = b.sort in
    exact (set_abduction_variable_term pr)
  | _ -> fail "Not an arrow goal"

let canon_l_r (use_smt:bool)
  (carrier_t:term)  //e.g. vprop
  (eq:term) (m:term) (pr pr_bind:term)
  (lhs rel rhs:term) : Tac unit =
  let m_unit = norm_term [iota; zeta; delta] (`(CE.CM?.unit (`#m))) in
  let m_mult = norm_term [iota; zeta; delta] (`(CE.CM?.mult (`#m))) in
  let am = const m_unit in (* empty map *)
  let (r1_raw, ts, am) = reification eq m [] am lhs in
  let (r2_raw, _, am) = reification eq m ts am rhs in

  // Encapsulating this in a try/with to avoid spawning uvars for smt_fallback
  let l1_raw, l2_raw, emp_frame, uvar_terms =
    try
      let res = equivalent_lists use_smt (flatten r1_raw) (flatten r2_raw) am in
      raise (Result res)
    with
    | TacticFailure m -> fail m
    | Result res -> res
    | _ -> fail "uncaught exception in equivalent_lists"
  in

  //So now we have:
  // am : amap mapping atoms to terms in lhs and rhs
  // r1_raw : an expression in the atoms language for lhs
  // r2_raw : an expression in the atoms language for rhs
  // l1_raw : sorted list of atoms in lhs
  // l2_raw : sorted list of atoms in rhs
  //
  //In particular, r1_raw and r2_raw capture lhs and rhs structurally
  //  (i.e. same associativity, emp, etc.)
  //
  //Whereas l1_raw and l2_raw are "canonical" representations of lhs and rhs
  //  (vis xsdenote)

  //Build an amap where atoms are mapped to names
  //The type of these names is carrier_t passed by the caller
  let am_bv : list (atom & namedv & typ) = mapi (fun i (a, _) ->
    let x = fresh_namedv_named ("x" ^ (string_of_int i)) in
    (a, x, carrier_t)) (fst am) in
  let am_bv_term : amap term =
    map (fun (a, bv, _sort) -> a, pack (Tv_Var bv)) am_bv, snd am in

  // Denotation builders producing terms over the named amap
  let mdenote_tm (e:exp) : term =
    mdenote_gen
      m_unit
      (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)])
      am_bv_term
      e in
  let xsdenote_tm (l:list atom) : term =
    xsdenote_gen
      m_unit
      (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)])
      am_bv_term
      l in

  //Get the named representations of lhs, rhs, and their respective sorted versions
  let lhs_named = mdenote_tm r1_raw in
  let rhs_named = mdenote_tm r2_raw in
  let sorted_lhs_named = xsdenote_tm l1_raw in
  let sorted_rhs_named = xsdenote_tm l2_raw in

  //We now build an auxiliary goal of the form:
  //
  // forall xs. (sorted_lhs_named `rel` sorted_rhs_names) ==> (lhs_names `rel` rhs_named)
  //
  // where xs are the fresh names that we introduced earlier
  let mk_rel (l r:term) : term =
    mk_app rel [(l, Q_Explicit); (r, Q_Explicit)] in
  let imp_rhs = mk_rel lhs_named rhs_named in
  let imp_lhs = mk_rel sorted_lhs_named sorted_rhs_named in
  let imp =
    mk_app (pack (Tv_FVar (pack_fv imp_qn))) [(imp_lhs, Q_Explicit); (imp_rhs, Q_Explicit)] in

  //fold over names and quantify over them
  let aux_goal = fold_right (fun (_, nv, sort) t ->
    let nvv = inspect_namedv nv in
    let b = {
      ppname = nvv.ppname;
      uniq = nvv.uniq;
      qual = Q_Explicit;
      attrs = [];
      sort = sort;
    } in
    let _, t = close_term b t in
    let t = pack (Tv_Abs b t) in
    mk_app (pack (Tv_FVar (pack_fv forall_qn))) [t, Q_Explicit]) am_bv imp in

  //Introduce a cut with the auxiliary goal
  apply_lemma (`cut (`#aux_goal));

  //After the cut, the goal looks like: A /\ (A ==> G)
  // where A is the auxiliary goal and G is the original goal (lhs `rel` rhs)
  split ();

  //Solving A:
  focus (fun _ ->
    //The proof follows a similar structure as before naming was introduced
    //
    //Except that this time, the amap is in terms of names,
    // and hence its typechecking is faster and (hopefully) no SMT involved

    //Open the forall binders in A, and use the fresh names to build an amap
    let am = fold_left (fun am (a, _, _sort) ->
      let b = forall_intro () in
      let v = binding_to_namedv b in
      (a, pack (Tv_Var v))::am) [] am_bv, snd am in

    //Introduce the lhs of implication
    let b = implies_intro () in

    //Now the proof is the plain old canon proof
    let am = convert_am am in
    let r1 = quote_exp r1_raw in
    let r2 = quote_exp r2_raw in
    change_sq (`(normal_tac (mdenote (`#eq) (`#m) (`#am) (`#r1)
                   `CE.EQ?.eq (`#eq)`
                 mdenote (`#eq) (`#m) (`#am) (`#r2))));
    apply_lemma (`normal_elim);
    apply (`monoid_reflect );
    let l1 = quote_atoms l1_raw in
    let l2 = quote_atoms l2_raw in
    apply_lemma (`equivalent_sorted (`#eq) (`#m) (`#am) (`#l1) (`#l2));
    if List.Tot.length (goals ()) = 0 then ()
    else begin
      norm [primops; iota; zeta; delta_only [
        `%xsdenote; `%select; `%my_assoc; `%my_append;
        `%flatten; `%sort;
        `%my_sortWith; `%my_partition;
        `%my_bool_of_compare; `%my_compare_of_bool;
        `%fst; `%__proj__Mktuple2__item___1;
        `%snd; `%__proj__Mktuple2__item___2;
        `%CE.__proj__CM__item__unit;
        `%CE.__proj__CM__item__mult;
        `%rm;
        `%CE.__proj__EQ__item__eq;
        `%req;
        `%star;]
      ];
      //The goal is of the form G1 /\ G2 /\ G3, as in the requires of equivalent_sorted
      split ();
      split ();
      //Solve G1 and G2 by trefl
      trefl ();
      trefl ();
      //G3 is the lhs of the implication in the auxiliary goal
      // that we have in our assumptions via b
      apply (`FStar.Squash.return_squash);
      exact (binding_to_term b)
    end);

  dismiss_slprops();

  //Our goal now is A ==> G (where G is the original goal (lhs `rel` rhs))
  //Open the forall binders
  ignore (repeatn (List.Tot.length am_bv) (fun _ -> apply_lemma (`inst_bv)));

  //And apply modus ponens
  apply_lemma (`modus_ponens);

  //Now our goal is sorted_lhs_named `rel` sorted_rhs_named
  // where the names are replaced with fresh uvars (from the repeatn call above)
  //So we just trefl
  match uvar_terms with
  | [] -> // Closing unneeded prop uvar
    focus (fun _ ->
      try
        apply_lemma (`and_true);
        split ();
        if emp_frame then apply_lemma (`identity_left (`#eq) (`#m))
        else apply_lemma (`(CE.EQ?.reflexivity (`#eq)));
        unify_pr_with_true pr; // MUST be done AFTER identity_left/reflexivity, which can unify other uvars
        apply_lemma (`solve_implies_true)
      with
      | TacticFailure msg -> fail ("Cannot unify pr with true: " ^ msg)
      | e -> raise e
    )
  | l ->
    if emp_frame then (
      apply_lemma (`identity_left_smt (`#eq) (`#m))
    ) else (
      apply_lemma (`smt_reflexivity (`#eq))
    );
    t_trefl true;
    close_equality_typ (cur_goal());
    revert ();
    set_abduction_variable ()

/// Wrapper around the tactic above
/// The constraint should be of the shape `squash (equiv lhs rhs)`
let canon_monoid (use_smt:bool) (carrier_t:term) (eq m:term) (pr pr_bind:term) : Tac unit =
  norm [iota; zeta];
  let t = cur_goal () in
  // removing top-level squash application
let sq, rel_xy = collect_app_ref t in // unpacking the application of the equivalence relation (lhs `EQ?.eq eq` rhs) (match rel_xy with | [(rel_xy,_)] -> ( let open FStar.List.Tot.Base in let rel, xy = collect_app_ref rel_xy in if (length xy >= 2) then ( match index xy (length xy - 2) , index xy (length xy - 1) with | (lhs, Q_Explicit) , (rhs, Q_Explicit) -> canon_l_r use_smt carrier_t eq m pr pr_bind lhs rel rhs | _ -> fail "Goal should have been an application of a binary relation to 2 explicit arguments" ) else ( fail "Goal should have been an application of a binary relation to n implicit and 2 explicit arguments" ) ) | _ -> fail "Goal should be squash applied to a binary relation") /// Instantiation of the generic AC-unification tactic with the vprop commutative monoid let canon' (use_smt:bool) (pr:term) (pr_bind:term) : Tac unit = canon_monoid use_smt (pack (Tv_FVar (pack_fv [`%vprop]))) (`req) (`rm) pr pr_bind /// Counts the number of unification variables corresponding to vprops in the term [t] let rec slterm_nbr_uvars (t:term) : Tac int = match inspect_unascribe t with | Tv_Uvar _ _ -> 1 | Tv_App _ _ -> let hd, args = collect_app t in if is_star_or_unit hd then // Only count the number of unresolved slprops, not program implicits slterm_nbr_uvars_argv args else if is_uvar hd then 1 else 0 | Tv_Abs _ t -> slterm_nbr_uvars t | _ -> 0 and slterm_nbr_uvars_argv (args: list argv) : Tac int = fold_left (fun n (x, _) -> n + slterm_nbr_uvars x) 0 args let guard_vprop (v: vprop) : Tot vprop = v let rec all_guards_solved (t: term) : Tac bool = match inspect_unascribe t with | Tv_Abs _ t -> all_guards_solved t | Tv_App _ _ -> let hd, args = collect_app t in if hd `is_fvar` (`%guard_vprop) then slterm_nbr_uvars_argv args = 0 else if not (all_guards_solved hd) then false else List.Tot.fold_left (fun (tac: (unit -> Tac bool)) (tm, _) -> let f () : Tac bool = if all_guards_solved tm then tac () else false in f ) (let f () : Tac bool = true in f) args () | _ -> true let 
unfold_guard () : Tac bool = if all_guards_solved (cur_goal ()) then begin focus (fun _ -> norm [delta_only [(`%guard_vprop)]]); true end else false let rec term_is_uvar (t: term) (i: int) : Tac bool = match inspect t with | Tv_Uvar i' _ -> i = i' | Tv_App _ _ -> let hd, args = collect_app t in term_is_uvar hd i | _ -> false val solve_can_be_split_for : string -> Tot unit val solve_can_be_split_lookup : unit // FIXME: src/reflection/FStar.Reflection.Basic.lookup_attr only supports fvar attributes, so we cannot directly look up for (solve_can_be_split_for blabla), we need a nullary attribute to use with lookup_attr let rec dismiss_all_but_last' (l: list goal) : Tac unit = match l with | [] | [_] -> set_goals l | _ :: q -> dismiss_all_but_last' q let dismiss_all_but_last () : Tac unit = dismiss_all_but_last' (goals ()) let rec dismiss_non_squash_goals' (keep:list goal) (goals:list goal) : Tac unit = match goals with | [] -> set_goals (List.Tot.rev keep) | hd :: tl -> let f = term_as_formula' (goal_type hd) in match f with | App hs _ -> if is_squash hs then dismiss_non_squash_goals' (hd::keep) tl else dismiss_non_squash_goals' keep tl | _ -> dismiss_non_squash_goals' keep tl let dismiss_non_squash_goals () = let g = goals () in dismiss_non_squash_goals' [] g let rec term_mem (te: term) (l: list term) : Tac bool = match l with | [] -> false | t' :: q -> if te `term_eq_old` t' then true else term_mem te q let rec lookup_by_term_attr' (attr: term) (e: env) (found: list fv) (l: list fv) : Tac (list fv) = match l with | [] -> List.Tot.rev found | f :: q -> let n = inspect_fv f in begin match lookup_typ e n with | None -> lookup_by_term_attr' attr e found q | Some se -> let found' = if attr `term_mem` sigelt_attrs se then f :: found else found in lookup_by_term_attr' attr e found' q end let lookup_by_term_attr (label_attr: term) (attr: term) : Tac (list fv) = let e = cur_env () in let candidates = lookup_attr label_attr e in lookup_by_term_attr' attr e [] candidates let rec 
bring_last_goal_on_top' (others: list goal) (goals: list goal) : Tac unit = match goals with | [] -> set_goals (List.Tot.rev others) | last :: [] -> set_goals (last :: List.Tot.rev others) | a :: q -> bring_last_goal_on_top' (a :: others) q let bring_last_goal_on_top () = let g = goals () in bring_last_goal_on_top' [] g let rec extract_contexts (lemma_left lemma_right label_attr attr: term) (t: term) : Tac (option (unit -> Tac unit)) = let hd, tl = collect_app t in if is_star hd then match tl with | (t_left, Q_Explicit) :: (t_right, Q_Explicit) :: [] -> let extract_right () : Tac (option (unit -> Tac unit)) = match extract_contexts lemma_left lemma_right label_attr attr t_right with | None -> None | Some f -> Some (fun _ -> apply_lemma lemma_right; dismiss_all_but_last (); f () ) in begin match extract_contexts lemma_left lemma_right label_attr attr t_left with | None -> extract_right () | Some f -> Some (fun _ -> try apply_lemma lemma_left; dismiss_all_but_last (); f () with _ -> begin match extract_right () with | None -> fail "no context on the right either" | Some g -> g () end ) end | _ -> None else let candidates = let hd_fv = match inspect_unascribe hd with | Tv_FVar fv -> Some fv | Tv_UInst fv _ -> Some fv | _ -> None in match hd_fv with | None -> [] | Some hd_fv -> let hd_s' = implode_qn (inspect_fv hd_fv) in let hd_s = pack (Tv_Const (C_String hd_s')) in lookup_by_term_attr label_attr (mk_app attr [hd_s, Q_Explicit]) in if Nil? candidates then None else Some (fun _ -> first (List.Tot.map (fun candidate _ -> apply_lemma (pack (Tv_FVar candidate)) <: Tac unit) candidates); dismiss_non_squash_goals () ) let extract_cbs_contexts = extract_contexts (`can_be_split_congr_l) (`can_be_split_congr_r) (`solve_can_be_split_lookup) (`solve_can_be_split_for) let open_existentials () : Tac unit = let e = cur_env () in if Nil? 
(lookup_attr (`solve_can_be_split_lookup) e) then fail "Tactic disabled: no available lemmas in context"; norm [delta_attr [`%__reduce__]]; let t0 = cur_goal () in match collect_app t0 with | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] -> let hd, tl = collect_app t1 in if hd `is_fvar` (`%can_be_split) then match tl with | _ (* lhs *) :: (rhs, Q_Explicit) :: [] -> begin match extract_cbs_contexts rhs with | None -> fail "open_existentials: no context found" | Some f -> apply_lemma (`can_be_split_trans_rev); dismiss_all_but_last (); split (); focus f; bring_last_goal_on_top () // so that any preconditions for the selected lemma are scheduled for later end | _ -> fail "open_existentials: ill-formed can_be_split" else fail "open_existentials: not a can_be_split goal" | _ -> fail "open_existentials: not a squash goal" let try_open_existentials () : Tac bool = focus (fun _ -> try open_existentials (); true with _ -> false ) (* Solving the can_be_split* constraints, if they are ready to be scheduled A constraint is deemed ready to be scheduled if it contains only one vprop unification variable If so, constraints are stripped to their underlying definition based on vprop equivalence, introducing universally quantified variables when needed. 
Internal details of the encoding are removed through normalization, before calling the AC-unification tactic defined above *) /// Solves a `can_be_split` constraint let rec solve_can_be_split (args:list argv) : Tac bool = match args with | [(t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in try focus (fun _ -> apply_lemma (`equiv_can_be_split); dismiss_slprops(); // If we have exactly the same term on both side, // equiv_sl_implies would solve the goal immediately or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if rnbr = 0 then apply_lemma (`equiv_sym); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true with | _ -> let opened_some = try_open_existentials () in if opened_some then solve_can_be_split args // we only need args for their number of uvars, which has not changed else false ) else false | _ -> false // Ill-formed can_be_split, should not happen /// Solves a can_be_split_dep constraint let solve_can_be_split_dep (args:list argv) : Tac bool = match args with | [(p, _); (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> let p_bind = implies_intro () in apply_lemma (`equiv_can_be_split); dismiss_slprops (); or_else (fun _ -> let b = unify p (`true_p) in if not b then fail "could not unify SMT prop with True"; apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; 
`%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' true p (binding_to_term p_bind))); true ) else false | _ -> fail "ill-formed can_be_split_dep" /// Helper rewriting lemma val emp_unit_variant (p:vprop) : Lemma (ensures can_be_split p (p `star` emp)) /// Solves a can_be_split_forall constraint let solve_can_be_split_forall (args:list argv) : Tac bool = match args with | [_; (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> ignore (forall_intro()); apply_lemma (`equiv_can_be_split); dismiss_slprops(); or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "Ill-formed can_be_split_forall, should not happen" val solve_can_be_split_forall_dep_for : string -> Tot unit val solve_can_be_split_forall_dep_lookup : unit // FIXME: same as solve_can_be_split_for above let extract_cbs_forall_dep_contexts = extract_contexts (`can_be_split_forall_dep_congr_l) (`can_be_split_forall_dep_congr_r) (`solve_can_be_split_forall_dep_lookup) (`solve_can_be_split_forall_dep_for) let open_existentials_forall_dep () : Tac unit = let e = cur_env () in if Nil? 
(lookup_attr (`solve_can_be_split_forall_dep_lookup) e) then fail "Tactic disabled: no available lemmas in context"; norm [ delta_only [ `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%rm; ]; iota; delta_attr [`%__reduce__]; ]; let t0 = cur_goal () in match collect_app t0 with | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] -> let hd, tl = collect_app t1 in if hd `is_fvar` (`%can_be_split_forall_dep) then match tl with | _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: [] | (_, Q_Implicit) (* #a *) :: _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: [] -> begin match inspect_unascribe rhs with | Tv_Abs _ body -> begin match extract_cbs_forall_dep_contexts body with | None -> fail "open_existentials_forall_dep: no candidate" | Some f -> apply_lemma (`can_be_split_forall_dep_trans_rev); dismiss_all_but_last (); split (); focus f; bring_last_goal_on_top (); if Cons? (goals ()) then norm [] end | _ -> fail "open_existentials_forall_dep : not an abstraction" end | _ -> fail "open_existentials_forall_dep : wrong number of arguments to can_be_split_forall_dep" else fail "open_existentials_forall_dep : not a can_be_split_forall_dep goal" | _ -> fail "open_existentials_forall_dep : not a squash/auto_squash goal" let try_open_existentials_forall_dep () : Tac bool = focus (fun _ -> try open_existentials_forall_dep (); true with _ -> false ) /// Solves a can_be_split_forall_dep constraint let rec solve_can_be_split_forall_dep (args:list argv) : Tac bool = match args with | [_; (pr, _); (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in try focus (fun _ -> norm []; let x = forall_intro () in let pr = mk_app pr [(binding_to_term x, Q_Explicit)] in let p_bind = implies_intro () in apply_lemma (`equiv_can_be_split); or_else (fun _ -> flip()) (fun _ -> ()); let 
pr = norm_term [] pr in or_else (fun _ -> let b = unify pr (`true_p) in if not b then fail "could not unify SMT prop with True"; apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' true pr (binding_to_term p_bind))); true with | Postpone msg -> false | TacticFailure msg -> let opened = try_open_existentials_forall_dep () in if opened then solve_can_be_split_forall_dep args // we only need args for their number of uvars, which has not changed else fail msg | _ -> fail "Unexpected exception in framing tactic" ) else false | _ -> fail "Ill-formed can_be_split_forall_dep, should not happen" /// Solves an equiv_forall constraint let solve_equiv_forall (args:list argv) : Tac bool = match args with | [_; (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> apply_lemma (`equiv_forall_elim); match goals () with | [] -> () | _ -> dismiss_slprops (); ignore (forall_intro()); or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "Ill-formed equiv_forall, should not happen" /// Solves an equiv constraint let solve_equiv (args:list argv) : Tac bool = match args with | [(t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 
then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip ()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "Ill-formed equiv, should not happen" /// Solves a can_be_split_post constraint let solve_can_be_split_post (args:list argv) : Tac bool = match args with | [_; _; (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> norm[]; let g = _cur_goal () in ignore (forall_intro()); apply_lemma (`equiv_forall_elim); match goals () with | [] -> () | _ -> dismiss_slprops (); ignore (forall_intro()); or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "ill-formed can_be_split_post" /// Checks whether any of the two terms was introduced during a Steel monadic return let is_return_eq (l r:term) : Tac bool = let nl, al = collect_app l in let nr, ar = collect_app r in is_fvar nl (`%return_pre) || is_fvar nr (`%return_pre) /// Solves indirection equalities introduced by the layered effects framework. 
/// If these equalities were introduced during a monadic return, they need to be solved
/// at a later stage to avoid overly restricting contexts of unification variables
let rec solve_indirection_eqs (fuel: nat) : Tac unit =
  if fuel = 0 then ()
  else match goals () with
  | [] -> ()
  | hd::_ ->
    let fml = term_as_formula' (goal_type hd) in
    match fml with
    | Comp (Eq _) l r ->
      // Return-related equalities are postponed; all others are discharged now
      if is_return_eq l r then later () else trefl ();
      solve_indirection_eqs (fuel - 1)
    | _ -> later (); solve_indirection_eqs (fuel - 1)

/// Solve all equalities in the list of goals by calling the F* unifier
let rec solve_all_eqs (fuel: nat) : Tac unit =
  if fuel = 0 then ()
  else match goals () with
  | [] -> ()
  | hd::_ ->
    let fml = term_as_formula' (goal_type hd) in
    match fml with
    | Comp (Eq _) l r ->
      trefl ();
      solve_all_eqs (fuel - 1)
    | _ -> later (); solve_all_eqs (fuel - 1)

/// It is important to not normalize the return_pre eqs goals before unifying
/// See test7 in FramingTestSuite for a detailed explanation
let rec solve_return_eqs (fuel: nat) : Tac unit =
  if fuel = 0 then ()
  else match goals () with
  | [] -> ()
  | hd::_ ->
    let fml = term_as_formula' (goal_type hd) in
    match fml with
    | Comp (Eq _) l r ->
      trefl ();
      solve_return_eqs (fuel - 1)
    | _ -> later (); solve_return_eqs (fuel - 1)

/// Strip annotations in a goal, to get to the underlying slprop equivalence
let goal_to_equiv (loc:string) : Tac unit =
  let t = cur_goal () in
  let fml = term_as_formula' t in
  match fml with
  | App hd0 t ->
    if not (is_fvar hd0 (`%squash))
    then fail (loc ^ " unexpected non-squash goal in goal_to_equiv");
    let hd, args = collect_app t in
    if hd `is_fvar` (`%can_be_split) then (
      apply_lemma (`equiv_can_be_split)
    ) else if hd `is_fvar` (`%can_be_split_forall) then (
      ignore (forall_intro ());
      apply_lemma (`equiv_can_be_split)
    ) else if hd `is_fvar` (`%equiv_forall) then (
      apply_lemma (`equiv_forall_elim);
      ignore (forall_intro ())
    ) else if hd `is_fvar` (`%can_be_split_post) then (
      apply_lemma (`can_be_split_post_elim);
      dismiss_slprops ();
      ignore (forall_intro ());
      ignore (forall_intro ())
    ) else if hd `is_fvar` (`%can_be_split_dep) then (
      fail ("can_be_split_dep not supported in " ^ loc)
    ) else if hd `is_fvar` (`%can_be_split_forall_dep) then (
      fail ("can_be_split_forall_dep not supported in " ^ loc)
    ) else
      // This should never happen
      fail (loc ^ " goal in unexpected position")
  | _ -> fail (loc ^ " unexpected goal")

let rec term_dict_assoc
  (#a: Type)
  (key: term)
  (l: list (term & a))
: Tac (list a)
= match l with
  | [] -> []
  | (k, v) :: q ->
    let q' = term_dict_assoc key q in
    if k `term_eq_old` key
    then v :: q'
    else q'

/// Returns true if the goal has been solved, false if it should be delayed
let solve_or_delay (dict: list (term & (unit -> Tac bool))) : Tac bool =
  // Beta-reduce the goal first if possible
  norm [];
  let fml = term_as_formula' (cur_goal ()) in
  match fml with
  | App hd0 t ->
    if is_fvar hd0 (`%squash)
    then
      let hd, args = collect_app t in
      // Dispatch on the head symbol of the squashed constraint
      if hd `is_fvar` (`%can_be_split) then solve_can_be_split args
      else if hd `is_fvar` (`%can_be_split_forall) then solve_can_be_split_forall args
      else if hd `is_fvar` (`%equiv_forall) then solve_equiv_forall args
      else if hd `is_fvar` (`%can_be_split_post) then solve_can_be_split_post args
      else if hd `is_fvar` (`%equiv) then solve_equiv args
      else if hd `is_fvar` (`%can_be_split_dep) then solve_can_be_split_dep args
      else if hd `is_fvar` (`%can_be_split_forall_dep) then solve_can_be_split_forall_dep args
      else
        let candidates = term_dict_assoc hd dict in
        let run_tac (tac: unit -> Tac bool) () : Tac bool =
          focus tac
        in
        begin try
          first (List.Tot.map run_tac candidates)
        with _ ->
          (* this is a logical goal, solve it only if it has no uvars *)
          if List.Tot.length (free_uvars t) = 0
          then (smt (); true)
          else false
        end
    else
      // TODO: handle non-squash goals here
      false
  | Comp (Eq _) l r ->
    let lnbr = List.Tot.length (free_uvars l) in
    let rnbr = List.Tot.length (free_uvars r) in
    // Only solve equality if one of the terms is completely determined
    if lnbr = 0 || rnbr = 0 then (trefl (); true) else false
  | _ -> false

/// Returns true if it successfully solved a goal
/// If it returns false, it means it didn't find any solvable goal,
/// which should mean only delayed goals are left
let rec vprop_term_uvars (t:term) : Tac (list int) =
  match inspect_unascribe t with
  | Tv_Uvar i' _ -> [i']
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    if is_star_or_unit hd then
      // Only count the number of unresolved slprops, not program implicits
      argv_uvars args
    else
      vprop_term_uvars hd
  | Tv_Abs _ t -> vprop_term_uvars t
  | _ -> []

and argv_uvars (args: list argv) : Tac (list int) =
  let t : unit -> Tac (list int) =
    fold_left
      (fun (n: unit -> Tac (list int)) (x, _) ->
        let t () : Tac (list int) =
          let l1 = n () in
          let l2 = vprop_term_uvars x in
          l1 `List.Tot.append` l2
        in
        t)
      (fun _ -> [])
      args
  in
  t ()

let rec remove_dups_from_sorted (#t: eqtype) (l: list t) : Tot (list t) =
  match l with
  | [] | [_] -> l
  | a1 :: a2 :: q ->
    if a1 = a2
    then remove_dups_from_sorted (a2 :: q)
    else a1 :: remove_dups_from_sorted (a2 :: q)

let simplify_list (l: list int) : Tot (list int) =
  remove_dups_from_sorted (List.Tot.sortWith (List.Tot.compare_of_bool (<)) l)

let goal_term_uvars (t: term) : Tac (list int) =
  let hd, tl = collect_app t in
  if hd `is_fvar` (`%squash)
  then
    match tl with
    | [tl0, Q_Explicit] ->
      let _, tl1 = collect_app tl0 in
      simplify_list (argv_uvars tl1)
    | _ -> dump "ill-formed squash"; []
  else []

let rec merge_sorted (l1 l2: list int) : Tot (list int)
  (decreases (List.Tot.length l1 + List.Tot.length l2))
= match l1 with
  | [] -> l2
  | a1 :: q1 ->
    begin match l2 with
    | [] -> l1
    | a2 :: q2 ->
      if a1 < a2 then a1 :: merge_sorted q1 l2
      else if a2 < a1 then a2 :: merge_sorted l1 q2
      else a1 :: merge_sorted q1 q2
    end

let rec sorted_lists_intersect (l1 l2: list int) : Tot bool
  (decreases (List.Tot.length l1 + List.Tot.length l2))
= match l1 with
  | [] -> false
  | a1 :: q1 ->
    begin match l2 with
    | [] -> false
    | a2 :: q2 ->
      if a1 = a2 then true
      else if a1 < a2 then sorted_lists_intersect q1 l2
      else sorted_lists_intersect l1 q2
    end

/// TODO: cache the list of variables for each goal, to avoid computing them several times
/// Compute the list of all vprop uvars that appear in the same goal as unsolved guard_vprop
let rec compute_guarded_uvars1 (accu: list int) (g: list goal) : Tac (list int) =
  match g with
  | [] -> accu
  | a :: q ->
    let t = goal_type a in
    let accu' =
      if all_guards_solved t
      then accu
      else merge_sorted accu (goal_term_uvars t)
    in
    compute_guarded_uvars1 accu' q

/// Enrich the list of vprop uvars with those that appear in the same goal
let rec compute_guarded_uvars2 (accu: list int) (g: list goal) : Tac (list int) =
  match g with
  | [] -> accu
  | a :: q ->
    let t = goal_type a in
    let l = goal_term_uvars t in
    let accu' =
      if sorted_lists_intersect accu l
      then merge_sorted accu l
      else accu
    in
    compute_guarded_uvars2 accu' q

let rec compute_guarded_uvars3 (accu: list int) (g: list goal) : Tac (list int) =
  // Iterate until the accumulator reaches a fixed point
  let accu' = compute_guarded_uvars2 accu g in
  if accu = accu'
  then accu
  else compute_guarded_uvars3 accu' g

let compute_guarded_uvars () : Tac (list int) =
  let g = goals () in
  let accu = compute_guarded_uvars1 [] g in
  compute_guarded_uvars3 accu g

let rec pick_next (guarded_uvars: list int) (dict: _) (fuel: nat) : Tac bool =
  if fuel = 0
  then false
  else match goals () with
  | [] -> true
  | a::_ ->
    let t = goal_type a in
    let l = goal_term_uvars t in
    let next () : Tac bool =
      later ();
      pick_next guarded_uvars dict (fuel - 1)
    in
    if sorted_lists_intersect guarded_uvars l
    then next () // goal mentions a guarded uvar: postpone it
    else if solve_or_delay dict
    then true
    else next ()

/// Main loop to schedule solving of goals.
/// The goals () function fetches all current goals in the context
let rec resolve_tac (dict: _) : Tac unit =
  match goals () with
  | [] -> ()
  | g ->
    norm [];
    let guarded_uvars = compute_guarded_uvars () in
    // TODO: If it picks a goal it cannot solve yet, try all the other ones?
    if pick_next guarded_uvars dict (List.Tot.length g)
    then resolve_tac dict
    else fail "Could not make progress, no solvable goal found"

let rec pick_next_logical (dict: _) (fuel: nat) : Tac bool =
  if fuel = 0
  then false
  else match goals () with
  | [] -> true
  | _::_ ->
    if solve_or_delay dict
    then true
    else (later (); pick_next_logical dict (fuel - 1))

/// Special case for logical requires/ensures goals, which correspond only to equalities
let rec resolve_tac_logical (dict: _) : Tac unit =
  match goals () with
  | [] -> ()
  | g ->
    let fuel = List.Tot.length g in
    if pick_next_logical dict fuel
    then resolve_tac_logical dict
    else
      // This is only for requires/ensures constraints, which are equalities
      // There should always be a scheduling of constraints, but it can happen
      // that some uvar for the type of an equality is not resolved.
      // If we reach this point, we try to simply call the unifier instead of failing directly
      solve_all_eqs fuel

/// Determining whether the type represented by term [t] corresponds to one of the logical (requires/ensures) goals
let typ_contains_req_ens (t:term) : Tac bool =
  let name, _ = collect_app t in
  is_any_fvar name [`%req_t; `%ens_t; `%pure_wp; `%pure_pre; `%pure_post]

/// Splits goals between separation logic goals (slgoals) and requires/ensures goals (loggoals)
let rec filter_goals (l:list goal) : Tac (list goal * list goal) =
  match l with
  | [] -> [], []
  | hd::tl ->
    let slgoals, loggoals = filter_goals tl in
    match term_as_formula' (goal_type hd) with
    | Comp (Eq t) _ _ ->
      if Some? t then
        let b = typ_contains_req_ens (Some?.v t) in
        if b
        then (slgoals, hd::loggoals)
        else (hd::slgoals, loggoals)
      else (hd::slgoals, loggoals)
    | App t _ ->
      if is_fvar t (`%squash)
      then hd::slgoals, loggoals
      else slgoals, loggoals
    | _ -> slgoals, loggoals

let is_true (t:term) () : Tac unit =
  match term_as_formula t with
  | True_ -> exact (`())
  | _ -> raise Goal_not_trivial

/// Solve the maybe_emp goals:
/// Normalize to unfold maybe_emp(_dep) and the reduce the if/then/else, and
/// solve the goal (either an equality through trefl, or True through trivial)
let rec solve_maybe_emps (fuel: nat) : Tac unit =
  if fuel = 0 then ()
  else match goals () with
  | [] -> ()
  | _::_ ->
    let fml = term_as_formula' (cur_goal ()) in
    ( match fml with
      | App hd0 t ->
        if not (is_fvar hd0 (`%squash)) then later ()
        else
          let hd, args = collect_app t in
          if hd `is_fvar` (`%maybe_emp) then
            (norm [delta_only [`%maybe_emp]; iota; zeta; primops; simplify];
             let g = cur_goal () in
             or_else (is_true g) trefl)
          else if hd `is_fvar` (`%maybe_emp_dep) then
            (norm [delta_only [`%maybe_emp_dep]; iota; zeta; primops; simplify];
             let g = cur_goal () in
             or_else (is_true g) (fun _ -> ignore (forall_intro ()); trefl ()))
          else later ()
      | _ -> later ()
    );
    solve_maybe_emps (fuel - 1)

/// Normalizes all the return_pre annotations once they are not needed anymore
false
false
Steel.Effect.Common.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val norm_return_pre (fuel: nat) : Tac unit
[ "recursion" ]
Steel.Effect.Common.norm_return_pre
{ "file_name": "lib/steel/Steel.Effect.Common.fsti", "git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
fuel: Prims.nat -> FStar.Tactics.Effect.Tac Prims.unit
{ "end_col": 81, "end_line": 3062, "start_col": 2, "start_line": 3058 }
FStar.Tactics.Effect.Tac
val ite_soundness_tac: Prims.unit -> Tac unit
[ { "abbrev": false, "full_module": "FStar.Reflection.V2.Derived.Lemmas", "short_module": null }, { "abbrev": true, "full_module": "FStar.Algebra.CommMonoid.Equiv", "short_module": "CE" }, { "abbrev": false, "full_module": "FStar.Tactics.CanonCommMonoidSimple.Equiv", "short_module": null }, { "abbrev": false, "full_module": "FStar.Tactics.V2", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "FStar.Ghost", "short_module": null }, { "abbrev": true, "full_module": "FStar.FunctionalExtensionality", "short_module": "FExt" }, { "abbrev": true, "full_module": "Steel.Memory", "short_module": "Mem" }, { "abbrev": false, "full_module": "Steel.Memory", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ite_soundness_tac () : Tac unit = let slgs, loggoals = filter_goals (goals ()) in set_goals slgs; solve_indirection_eqs (List.Tot.length slgs); // This is the actual subcomp goal. We can only solve it // once all uvars are solved let subcomp_goal = _cur_goal () in match goals () with | [] -> fail "should not happen" | _::tl -> set_goals tl; or_else (fun _ -> apply_lemma (`equiv_forall_refl)) assumption; or_else (fun _ -> or_else (fun _ -> apply_lemma (`can_be_split_dep_refl)) (fun _ -> apply_lemma (`can_be_split_refl)) // Different formalism in Steel.ST ) assumption; // Discharging the maybe_emp by SMT smt (); // Now propagating all equalities for the requires/ensures set_goals loggoals; resolve_tac_logical []; // Now taking care of the actual subcomp VC set_goals [subcomp_goal]; norm []; smt ()
val ite_soundness_tac: Prims.unit -> Tac unit let ite_soundness_tac () : Tac unit =
true
null
false
let slgs, loggoals = filter_goals (goals ()) in set_goals slgs; solve_indirection_eqs (List.Tot.length slgs); let subcomp_goal = _cur_goal () in match goals () with | [] -> fail "should not happen" | _ :: tl -> set_goals tl; or_else (fun _ -> apply_lemma (`equiv_forall_refl)) assumption; or_else (fun _ -> or_else (fun _ -> apply_lemma (`can_be_split_dep_refl)) (fun _ -> apply_lemma (`can_be_split_refl))) assumption; smt (); set_goals loggoals; resolve_tac_logical []; set_goals [subcomp_goal]; norm []; smt ()
{ "checked_file": "Steel.Effect.Common.fsti.checked", "dependencies": [ "Steel.Memory.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Tactics.CanonCommMonoidSimple.Equiv.fst.checked", "FStar.String.fsti.checked", "FStar.Squash.fsti.checked", "FStar.Set.fsti.checked", "FStar.Reflection.V2.Derived.Lemmas.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.Base.fst.checked", "FStar.List.Tot.fst.checked", "FStar.Ghost.fsti.checked", "FStar.FunctionalExtensionality.fsti.checked", "FStar.Classical.fsti.checked", "FStar.Algebra.CommMonoid.Equiv.fst.checked" ], "interface_file": false, "source_file": "Steel.Effect.Common.fsti" }
[]
[ "Prims.unit", "Prims.list", "FStar.Tactics.Types.goal", "FStar.Tactics.V2.Derived.fail", "FStar.Tactics.V2.Derived.smt", "FStar.Tactics.V2.Builtins.norm", "Prims.Nil", "FStar.Pervasives.norm_step", "FStar.Tactics.V2.Builtins.set_goals", "Prims.Cons", "Steel.Effect.Common.resolve_tac_logical", "FStar.Pervasives.Native.tuple2", "FStar.Tactics.NamedView.term", "Prims.bool", "FStar.Tactics.V2.Derived.or_else", "FStar.Tactics.V2.Derived.apply_lemma", "FStar.Tactics.V2.Derived.assumption", "FStar.Tactics.V2.Derived.goals", "FStar.Tactics.V2.Derived._cur_goal", "Steel.Effect.Common.solve_indirection_eqs", "FStar.List.Tot.Base.length", "Steel.Effect.Common.filter_goals" ]
[]
(* Copyright 2020 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Steel.Effect.Common open Steel.Memory module Mem = Steel.Memory module FExt = FStar.FunctionalExtensionality open FStar.Ghost /// This module provides various predicates and functions which are common to the /// different Steel effects. /// It also contains the tactic responsible for frame inference through a variant of AC-unification #set-options "--ide_id_info_off" (* Normalization helpers *) irreducible let framing_implicit : unit = () irreducible let __steel_reduce__ : unit = () /// An internal attribute for finer-grained normalization in framing equalities irreducible let __inner_steel_reduce__ : unit = () irreducible let __reduce__ : unit = () irreducible let smt_fallback : unit = () irreducible let ite_attr : unit = () // Needed to avoid some logical vs prop issues during unification with no subtyping [@@__steel_reduce__] unfold let true_p : prop = True module T = FStar.Tactics.V2 let join_preserves_interp (hp:slprop) (m0:hmem hp) (m1:mem{disjoint m0 m1}) : Lemma (interp hp (join m0 m1)) [SMTPat (interp hp (join m0 m1))] = let open Steel.Memory in intro_emp m1; intro_star hp emp m0 m1; affine_star hp emp (join m0 m1) (* Definition of a selector for a given slprop *) /// A selector of type `a` for a separation logic predicate hp is a function /// from a memory where the predicate hp holds, which returns a value of type `a`. 
/// The effect GTot indicates that selectors are ghost functions, used for specification /// and proof purposes only let selector' (a:Type0) (hp:slprop) = hmem hp -> GTot a /// Self-framing property for selectors let sel_depends_only_on (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp) (m1:mem{disjoint m0 m1}). (interp_depends_only_on hp; ( sel m0 == sel (join m0 m1))) /// Additional property that selectors must satisfy, related to internals of /// the Steel memory model encoding let sel_depends_only_on_core (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp). sel m0 == sel (core_mem m0) /// Full definition of a selector, as a function which satisfies the two predicates above let selector (a:Type) (hp:slprop) : Type = sel:selector' a hp{sel_depends_only_on sel /\ sel_depends_only_on_core sel} /// The basis of our selector framework: Separation logic assertions enhanced with selectors /// Note that selectors are "optional", it is always possible to use a non-informative selector, /// such as fun _ -> () and to rely on the standard separation logic reasoning [@@ erasable] noeq type vprop' = { hp: slprop u#1; t:Type0; sel: selector t hp} (* Lifting the star operator to an inductive type makes normalization and implementing some later functions easier *) [@@__steel_reduce__; erasable] noeq type vprop = | VUnit : vprop' -> vprop | VStar: vprop -> vprop -> vprop (* A generic lift from slprop to vprop with a non-informative selector *) [@@ __steel_reduce__] let to_vprop' (p:slprop) = {hp = p; t = unit; sel = fun _ -> ()} [@@ __steel_reduce__] unfold let to_vprop (p:slprop) = VUnit (to_vprop' p) /// Normalization steps for norm below. 
/// All functions marked as `unfold`, or with the `__steel_reduce__` attribute will be reduced, /// as well as some functions internal to the selector framework unfold let normal_steps = [delta_attr [`%__steel_reduce__; `%__inner_steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify] /// The core normalization primitive used to simplify Verification Conditions before encoding /// them to an SMT solver. unfold let normal (#a:Type) (x:a) = norm normal_steps x /// An abbreviation for the VStar constructor, allowing to use it with infix notation [@@ __steel_reduce__; __reduce__] let star = VStar /// Extracting the underlying separation logic assertion from a vprop [@@ __steel_reduce__] let rec hp_of (p:vprop) = match p with | VUnit p -> p.hp | VStar p1 p2 -> hp_of p1 `Mem.star` hp_of p2 /// Extracting the selector type from a vprop [@@ __steel_reduce__] let rec t_of (p:vprop) = match p with | VUnit p -> p.t | VStar p1 p2 -> t_of p1 * t_of p2 /// Extracting the selector from a vprop [@@ __steel_reduce__] let rec sel_of (p:vprop) : GTot (selector (t_of p) (hp_of p)) = match p with | VUnit p -> fun h -> p.sel h | VStar p1 p2 -> let sel1 = sel_of p1 in let sel2 = sel_of p2 in fun h -> (sel1 h, sel2 h) /// Type abbreviations for separation logic pre- and postconditions of the Steel effects type pre_t = vprop type post_t (a:Type) = a -> vprop /// An annotation to indicate which separation logic predicates correspond to monadic computations /// These computations are handled in a specific manner in the framing tactic; they correspond to places where /// the context shrinks from all local variables in the computation to variables available at the toplevel let return_pre (p:vprop) : vprop = p noextract let hmem (p:vprop) = hmem (hp_of p) /// Abstract predicate for vprop implication. 
Currently implemented as an implication on the underlying slprop val can_be_split (p q:pre_t) : Type0 /// Exposing the implementation of `can_be_split` when needed for proof purposes val reveal_can_be_split (_:unit) : Lemma (forall p q. can_be_split p q == Mem.slimp (hp_of p) (hp_of q)) /// A targeted version of the above val can_be_split_interp (r r':vprop) (h:hmem r) : Lemma (requires can_be_split r r') (ensures interp (hp_of r') h) /// A dependent version of can_be_split, to be applied to dependent postconditions let can_be_split_forall (#a:Type) (p q:post_t a) = forall x. can_be_split (p x) (q x) /// A version of can_be_split which is indexed by a proposition, which can be used for equalities abduction let can_be_split_dep (p:prop) (t1 t2:pre_t) = p ==> can_be_split t1 t2 /// A dependent version of the above predicate let can_be_split_forall_dep (#a:Type) (p:a -> prop) (t1 t2:post_t a) = forall (x:a). p x ==> can_be_split (t1 x) (t2 x) (* Some lemmas about the can_be_split* predicates, to be used as rewriting rules for the abstract predicates *) val can_be_split_trans (p q r:vprop) : Lemma (requires p `can_be_split` q /\ q `can_be_split` r) (ensures p `can_be_split` r) let can_be_split_trans_rev (p q r:vprop) : Lemma (requires q `can_be_split` r /\ p `can_be_split` q) (ensures p `can_be_split` r) = can_be_split_trans p q r val can_be_split_star_l (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` p) [SMTPat ((p `star` q) `can_be_split` p)] val can_be_split_star_r (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` q) [SMTPat ((p `star` q) `can_be_split` q)] val can_be_split_refl (p:vprop) : Lemma (p `can_be_split` p) [SMTPat (p `can_be_split` p)] val can_be_split_congr_l (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((p `star` r) `can_be_split` (q `star` r))) val can_be_split_congr_r (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((r `star` p) `can_be_split` (r `star` q))) let prop_and (p1 p2: prop) : Tot prop = p1 
/\ p2 let can_be_split_forall_dep_trans_rev (#a: Type) (cond1 cond2: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond2 q r /\ can_be_split_forall_dep cond1 p q)) (ensures (can_be_split_forall_dep (fun x -> cond1 x `prop_and` cond2 x) p r)) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_trans x y) z) let can_be_split_forall_dep_congr_l (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> p x `star` r x) (fun x -> q x `star` r x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_l x y) z) let can_be_split_forall_dep_congr_r (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> r x `star` p x) (fun x -> r x `star` q x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_r x y) z) /// To simplify the implementation of the framing tactic, dependent equivalence /// is defined as a double dependent implication let equiv_forall (#a:Type) (t1 t2:post_t a) : Type0 = t1 `can_be_split_forall` t2 /\ t2 `can_be_split_forall` t1 /// This equivalence models a context restriction at the end of a Steel computation; /// note that t2 does not depend on the value of type `a`, but the two vprops must be /// equivalent let can_be_split_post (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) = forall (x:a). equiv_forall (t1 x) t2 /// Lifting the equivalence relation to vprops. Two vprops are equivalent if the underlying slprops /// are equivalent val equiv (p q:vprop) : prop /// Revealing the definition of vprop equivalence when needed for proof purposes. 
/// In other cases, the predicate is abstract val reveal_equiv (p q:vprop) : Lemma (p `equiv` q <==> hp_of p `Mem.equiv` hp_of q) (* A restricted view of the heap, that only allows to access selectors of the current slprop *) let rmem' (pre:vprop) = FExt.restricted_g_t (r0:vprop{can_be_split pre r0}) (fun r0 -> normal (t_of r0)) /// Ensuring that rmems encapsulate the structure induced by the separation logic star val valid_rmem (#frame:vprop) (h:rmem' frame) : prop unfold let rmem (pre:vprop) = h:rmem' pre{valid_rmem h} /// Exposing the definition of mk_rmem to better normalize Steel VCs unfold noextract let unrestricted_mk_rmem (r:vprop) (h:hmem r) = fun (r0:vprop{r `can_be_split` r0}) -> can_be_split_interp r r0 h; sel_of r0 h [@@ __inner_steel_reduce__] noextract let mk_rmem' (r:vprop) (h:hmem r) : Tot (rmem' r) = FExt.on_dom_g (r0:vprop{r `can_be_split` r0}) (unrestricted_mk_rmem r h) val lemma_valid_mk_rmem (r:vprop) (h:hmem r) : Lemma (valid_rmem (mk_rmem' r h)) [@@ __inner_steel_reduce__] noextract let mk_rmem (r:vprop) (h:hmem r) : Tot (rmem r) = lemma_valid_mk_rmem r h; mk_rmem' r h val reveal_mk_rmem (r:vprop) (h:hmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (ensures reveal_can_be_split(); (mk_rmem r h) r0 == sel_of r0 h) (* Logical pre and postconditions can only access the restricted view of the heap *) type req_t (pre:pre_t) = rmem pre -> Type0 type ens_t (pre:pre_t) (a:Type) (post:post_t a) = rmem pre -> (x:a) -> rmem (post x) -> Type0 (* Empty assertion *) val emp : vprop /// When needed for proof purposes, the empty assertion is a direct lift of the /// empty assertion from Steel.Memory val reveal_emp (_:unit) : Lemma (hp_of emp == Mem.emp /\ t_of emp == unit) /// Lifting pure predicates to vprop [@@__steel_reduce__] unfold let pure (p:prop) = to_vprop (pure p) /// Framing predicates for the Steel effect. 
If the current computation has already /// been framed, then the additional frame is the empty predicate let maybe_emp (framed:bool) (frame:pre_t) = if framed then frame == emp else True /// Dependent version of the above predicate, usable in dependent postconditions let maybe_emp_dep (#a:Type) (framed:bool) (frame:post_t a) = if framed then (forall x. frame x == emp) else True (* focus_rmem is an additional restriction of our view of memory. We expose it here to be able to reduce through normalization; Any valid application of focus_rmem h will be reduced to the application of h *) [@@ __steel_reduce__] unfold let unrestricted_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) = fun (r':vprop{can_be_split r0 r'}) -> can_be_split_trans r r0 r'; h r' [@@ __inner_steel_reduce__] let focus_rmem' (#r: vprop) (h: rmem r) (r0: vprop{r `can_be_split` r0}) : Tot (rmem' r0) = FExt.on_dom_g (r':vprop{can_be_split r0 r'}) (unrestricted_focus_rmem h r0) val lemma_valid_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (valid_rmem (focus_rmem' h r0)) [@@ __inner_steel_reduce__] let focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Tot (rmem r0) = lemma_valid_focus_rmem h r0; focus_rmem' h r0 /// Exposing that calling focus_rmem on the current context corresponds to an equality let focus_rmem_refl (r:vprop) (h:rmem r) : Lemma (focus_rmem #r h r == h) = FStar.FunctionalExtensionality.extensionality_g _ _ (focus_rmem #r h r) h open FStar.Tactics.V2 /// State that all "atomic" subresources have the same selectors on both views. 
/// The predicate has the __steel_reduce__ attribute, ensuring that VC normalization /// will reduce it to a conjunction of equalities on atomic subresources /// This predicate is also marked as `strict_on_arguments` on [frame], ensuring that /// it will not be reduced when the frame is symbolic /// Instead, the predicate will be rewritten to an equality using `lemma_frame_equalities` below [@@ __steel_reduce__; strict_on_arguments [0]] let rec frame_equalities' (frame:vprop) (h0:rmem frame) (h1:rmem frame) : Type0 = begin match frame with | VUnit p -> h0 frame == h1 frame | VStar p1 p2 -> can_be_split_star_l p1 p2; can_be_split_star_r p1 p2; let h01 = focus_rmem h0 p1 in let h11 = focus_rmem h1 p1 in let h02 = focus_rmem h0 p2 in let h12 = focus_rmem h1 p2 in frame_equalities' p1 h01 h11 /\ frame_equalities' p2 h02 h12 end /// This lemma states that frame_equalities is the same as an equality on the top-level frame. /// The uncommon formulation with an extra [p] is needed to use in `rewrite_with_tactic`, /// where the goal is of the shape `frame_equalities frame h0 h1 == ?u` /// The rewriting happens below, in `frame_vc_norm` val lemma_frame_equalities (frame:vprop) (h0:rmem frame) (h1:rmem frame) (p:Type0) : Lemma (requires (h0 frame == h1 frame) == p) (ensures frame_equalities' frame h0 h1 == p) /// A special case for frames about emp. val lemma_frame_emp (h0:rmem emp) (h1:rmem emp) (p:Type0) : Lemma (requires True == p) (ensures frame_equalities' emp h0 h1 == p) /// A variant of conjunction elimination, suitable to the equality goals during rewriting val elim_conjunction (p1 p1' p2 p2':Type0) : Lemma (requires p1 == p1' /\ p2 == p2') (ensures (p1 /\ p2) == (p1' /\ p2')) /// Normalization and rewriting step for generating frame equalities. /// The frame_equalities function has the strict_on_arguments attribute on the [frame], /// ensuring that it is not reduced when the frame is symbolic. 
/// When that happens, we want to replace frame_equalities by an equality on the frame, /// mimicking reduction [@@plugin] let frame_vc_norm () : Tac unit = with_compat_pre_core 0 (fun _ -> // Do not normalize mk_rmem/focus_rmem to simplify application of // the reflexivity lemma on frame_equalities' norm [delta_attr [`%__steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify]; // After reduction, the term to rewrite might be of the shape // (frame_equalities' ... /\ frame_equalities' .. /\ ...) == ?u, // with some frame_equalities' possibly already fully reduced // We repeatedly split the clause and extract the term on the left // to generate equalities on atomic subresources ignore (repeat (fun _ -> // Try to split the conjunction. If there is no conjunction, we exit the repeat apply_lemma (`elim_conjunction); // Dismiss the two uvars created for the RHS, they'll be solved by unification dismiss (); dismiss (); // The first goal is the left conjunction split (); // Removes the frame equality if it is about emp or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ()); // Rewrites the frame_equalities if it wasn't yet reduced or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ()); norm normal_steps; // Finally solve the uvar, finishing the rewriting for this clause trefl () )); // Removes the frame equality if it is about emp or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ()); // We do not have conjunctions anymore, we try to apply the frame_equalities rewriting // If it fails, the frame was not symbolic, so there is nothing to do or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ()); norm normal_steps; trefl ()) [@@ __steel_reduce__] unfold let frame_equalities (frame:vprop) 
(h0:rmem frame) (h1:rmem frame) : prop = rewrite_with_tactic frame_vc_norm (frame_equalities' frame h0 h1) /// More lemmas about the abstract can_be_split predicates, to be used as /// rewriting rules in the tactic below val can_be_split_dep_refl (p:vprop) : Lemma (can_be_split_dep true_p p p) val equiv_can_be_split (p1 p2:vprop) : Lemma (requires p1 `equiv` p2) (ensures p1 `can_be_split` p2) val intro_can_be_split_frame (p q:vprop) (frame:vprop) : Lemma (requires q `equiv` (p `star` frame)) (ensures can_be_split q p /\ True) val can_be_split_post_elim (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) : Lemma (requires (forall (x:a) (y:b). t1 x y `equiv` t2 y)) (ensures t1 `can_be_split_post` t2) val equiv_forall_refl (#a:Type) (t:post_t a) : Lemma (t `equiv_forall` t) val equiv_forall_elim (#a:Type) (t1 t2:post_t a) : Lemma (requires (forall (x:a). t1 x `equiv` t2 x)) (ensures t1 `equiv_forall` t2) open FStar.Tactics.CanonCommMonoidSimple.Equiv (* equiv is an equivalence relation on vprops *) /// Lemmas establishing the equivalence properties on equiv val equiv_refl (x:vprop) : Lemma (equiv x x) val equiv_sym (x y:vprop) : Lemma (requires equiv x y) (ensures equiv y x) val equiv_trans (x y z:vprop) : Lemma (requires equiv x y /\ equiv y z) (ensures equiv x z) module CE = FStar.Algebra.CommMonoid.Equiv /// Equiv is an equivalence relation for vprops elements inline_for_extraction noextract let req : CE.equiv vprop = CE.EQ equiv equiv_refl equiv_sym equiv_trans (* Star induces a commutative monoid for the equiv equivalence relation *) /// Lemmas establishing the commutative monoid properties val cm_identity (x:vprop) : Lemma ((emp `star` x) `equiv` x) val star_commutative (p1 p2:vprop) : Lemma ((p1 `star` p2) `equiv` (p2 `star` p1)) val star_associative (p1 p2 p3:vprop) : Lemma (((p1 `star` p2) `star` p3) `equiv` (p1 `star` (p2 `star` p3))) val star_congruence (p1 p2 p3 p4:vprop) : Lemma (requires p1 `equiv` p3 /\ p2 `equiv` p4) (ensures (p1 `star` p2) `equiv` (p3 
`star` p4)) /// Star induces a commutative monoid on vprops [@__steel_reduce__] inline_for_extraction noextract let rm : CE.cm vprop req = CE.CM emp star cm_identity star_associative star_commutative star_congruence (*** Vprop combinators ***) (* Refining a vprop with a selector predicate *) /// Separation logic predicate stating the validity of a vprop with an additional refinement on its selector val vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (slprop u#1) /// Exposing the validity of the above predicate when needed for proof purposes val interp_vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: mem) : Lemma (interp (vrefine_hp v p) m <==> (interp (hp_of v) m /\ p (sel_of v m))) /// Selector type for a refined vprop [@__steel_reduce__] let vrefine_t (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot Type = (x: t_of v {p x}) /// Selector of a refined vprop. Returns a value which satisfies the refinement predicate val vrefine_sel (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (selector (vrefine_t v p) (vrefine_hp v p)) /// Exposing the definition of the refined selector val vrefine_sel_eq (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: Mem.hmem (vrefine_hp v p)) : Lemma ( interp (hp_of v) m /\ vrefine_sel v p m == sel_of v m ) // [SMTPat ((vrefine_sel v p) m)] // FIXME: this pattern causes Z3 "wrong number of argument" errors /// Combining the above pieces to define a vprop refined by a selector prediacte [@__steel_reduce__] let vrefine' (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot vprop' = { hp = vrefine_hp v p; t = vrefine_t v p; sel = vrefine_sel v p; } [@__steel_reduce__] let vrefine (v: vprop) (p: (normal (t_of v) -> Tot prop)) = VUnit (vrefine' v p) (* Dependent star for vprops *) /// Separation logic predicate corresponding to a dependent star, /// where the second predicate depends on the selector value of the first val vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (slprop u#1) /// Exposing the validity 
of the above predicate when needed for proof purposes val interp_vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: mem) : Lemma (interp (vdep_hp v p) m <==> (interp (hp_of v) m /\ interp (hp_of v `Mem.star` hp_of (p (sel_of v m))) m)) /// Helper to define the selector type of the second component of the dependent star let vdep_payload (v: vprop) (p: ( (t_of v) -> Tot vprop)) (x: t_of v) : Tot Type = t_of (p x) /// Selector type for the dependent star: A dependent tuple, where the second component's type depends on the first vprop let vdep_t (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot Type = dtuple2 (t_of v) (vdep_payload v p) /// Selector for the dependent star val vdep_sel (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (selector (vdep_t v p) (vdep_hp v p)) /// Exposing the definition of the dependent star's selector when needed for proof purposes val vdep_sel_eq (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: Mem.hmem (vdep_hp v p)) : Lemma ( interp (hp_of v) m /\ begin let x = sel_of v m in interp (hp_of (p x)) m /\ vdep_sel v p m == (| x, sel_of (p x) m |) end ) /// Combining the elements above to create a dependent star vprop [@__steel_reduce__] let vdep' (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot vprop' = { hp = vdep_hp v p; t = vdep_t v p; sel = vdep_sel v p; } [@__steel_reduce__] let vdep (v: vprop) (p: ( (t_of v) -> Tot vprop)) = VUnit (vdep' v p) (* Selector rewrite combinator *) /// The selector of a rewrite combinator applies a function `f` to the current selector of a vprop. 
/// Selector for the rewrite combinator: applies the user-provided function [f]
/// to the selector of the underlying vprop [v]
val vrewrite_sel
  (v: vprop)
  (#t: Type)
  (f: (normal (t_of v) -> GTot t))
: Tot (selector t (normal (hp_of v)))

/// Exposing the definition of the above selector
val vrewrite_sel_eq
  (v: vprop)
  (#t: Type)
  (f: (normal (t_of v) -> GTot t))
  (h: Mem.hmem (normal (hp_of v)))
: Lemma
  ((vrewrite_sel v f <: selector' _ _) h == f ((normal (sel_of v) <: selector' _ _) h))
//  [SMTPat (vrewrite_sel v f h)] // FIXME: this pattern causes Z3 "wrong number of argument" errors

/// Combining the above elements to create a rewrite vprop
[@__steel_reduce__]
let vrewrite' (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop' = {
  hp = normal (hp_of v);
  t = t;
  sel = vrewrite_sel v f;
}

[@__steel_reduce__]
let vrewrite (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop =
  VUnit (vrewrite' v f)

(*** Framing tactic ***)

(* Specialize visit_tm from the standard F* tactic library to reimplement
   name_appears_in.
   AF: As of Jan 14, 2021, calling name_appears_in from FStar.Tactics.Derived
   leads to a segfault *)

// Control-flow exception raised as soon as the searched-for name is found,
// allowing the traversal below to stop early
exception Appears

/// Apply [f] to the sort (the type) of binder [b]
let on_sort_binder (f : term -> Tac unit) (b:binder) : Tac unit =
  f b.sort

/// Traverse term [t], calling [ff] on subterms; [ff] is called on [t] itself last
let rec visit_tm (ff : term -> Tac unit) (t : term) : Tac unit =
  let tv = inspect t in
  (match tv with
  | Tv_FVar _
  | Tv_UInst _ _
  | Tv_Var _
  | Tv_BVar _ -> ()
  | Tv_Type _ -> ()
  | Tv_Const c -> ()
  | Tv_Uvar i u -> ()
  | Tv_Unsupp -> ()
  | Tv_Unknown -> ()
  | Tv_Arrow b c ->
      on_sort_binder ff b;
      visit_comp ff c
  | Tv_Abs b t ->
      let b = on_sort_binder (visit_tm ff) b in
      visit_tm ff t
  | Tv_App l (r, q) ->
      visit_tm ff l;
      visit_tm ff r
  | Tv_Refine b r ->
      on_sort_binder ff b;
      visit_tm ff r
  | Tv_Let r attrs b def t ->
      on_sort_binder ff b;
      visit_tm ff def;
      visit_tm ff t
  | Tv_Match sc _ brs ->
      visit_tm ff sc;
      iter (visit_br ff) brs
  | Tv_AscribedT e t topt _ ->
      visit_tm ff e;
      visit_tm ff t
  | Tv_AscribedC e c topt _ ->
      visit_tm ff e
  ); ff t

/// Visit a match branch: only the branch body is traversed, not the pattern
and visit_br (ff : term -> Tac unit) (b:branch) : Tac unit =
  let (p, t) = b in
  visit_tm ff t

/// Visit a computation type, traversing its result type and effect arguments
and visit_comp (ff : term -> Tac unit) (c : comp) : Tac unit =
  let cv = inspect_comp c in
  match cv with
  | C_Total ret -> visit_tm ff ret
  | C_GTotal ret -> visit_tm ff ret
  | C_Lemma pre post pats ->
      visit_tm ff pre;
      visit_tm ff post;
      visit_tm ff pats
  | C_Eff us eff res args decrs ->
      visit_tm ff res;
      iter (fun (a, q) -> visit_tm ff a) args;
      iter (visit_tm ff) decrs

/// Decides whether a top-level name [nm] syntactically
/// appears in the term [t].
let name_appears_in (nm:name) (t:term) : Tac bool =
  let ff (t : term) : Tac unit =
    match inspect t with
    | Tv_FVar fv -> if inspect_fv fv = nm then raise Appears
    | t -> ()
  in
  try ignore (visit_tm ff t); false with
  | Appears -> true
  | e -> raise e

/// Checks whether term [t] appears in term [i]
let term_appears_in (t:term) (i:term) : Tac bool =
  name_appears_in (explode_qn (term_to_string t)) i

/// We define a small language to handle arbitrary separation logic predicates.
/// Separation logic predicates are encoded as atoms for which equality is decidable,
/// here represented as integers
let atom : eqtype = int

/// Printing helper: space-separated rendering of a list of atoms
let rec atoms_to_string (l:list atom) = match l with
  | [] -> ""
  | hd::tl -> string_of_int hd ^ " " ^ atoms_to_string tl

/// Reflecting the structure of our separation logic on atoms
type exp : Type =
  | Unit : exp
  | Mult : exp -> exp -> exp
  | Atom : atom -> exp

/// A map from atoms to the terms they represent.
/// The second component of the term corresponds to a default element,
/// ensuring we never raise an exception when trying to access an element in the map
let amap (a:Type) = list (atom * a) * a

/// An empty atom map: The list map is empty
let const (#a:Type) (xa:a) : amap a = ([], xa)

/// Accessing an element in the atom map
// We reimplement List.Tot.Base.assoc because we need our tactic to normalize it,
// but we don't want to normalize user-provided instances
let rec my_assoc (#key: eqtype) (#value: Type) (k: key) (dict: list (key & value))
  : Pure (option value)
    (requires True)
    (ensures (fun res -> res == List.Tot.assoc k dict))
= match dict with
  | [] -> None
  | (k', v') :: q -> if k = k' then Some v' else my_assoc k q

/// Look up the term bound to atom [x] in [am], returning the map's
/// default element when [x] is unbound
let select (#a:Type) (x:atom) (am:amap a) : Tot a =
  match my_assoc #atom #a x (fst am) with
  | Some a -> a
  | _ -> snd am

/// Updating the atom map. Since select finds the first element corresponding to
/// the atom in the list and we do not have any remove function,
/// we can simply append the new element at the head without removing any possible
/// previous element
let update (#a:Type) (x:atom) (xa:a) (am:amap a) : amap a =
  (x, xa)::fst am, snd am

/// Check whether the current term is an unresolved vprop unification variable.
/// This can happen if either it is a uvar, or it is an unresolved dependent
/// vprop uvar which is applied to some argument
let is_uvar (t:term) : Tac bool = match inspect t with
  | Tv_Uvar _ _ -> true
  | Tv_App _ _ ->
    // An application whose head symbol is itself a uvar:
    // the dependent vprop uvar case described above
    let hd, args = collect_app t in
    Tv_Uvar? (inspect hd)
  | _ -> false

/// For a given term t, collect all terms in the list l with the same head symbol
let rec get_candidates (t:term) (l:list term) : Tac (list term) =
  let name, _ = collect_app t in
  match l with
  | [] -> []
  | hd::tl ->
    let n, _ = collect_app hd in
    if term_eq_old n name then (
      hd::(get_candidates t tl)
    ) else get_candidates t tl

/// Try to remove a term that is exactly matching, not just that can be unified
/// Returns whether [t] was found, together with the remaining list
let rec trivial_cancel (t:atom) (l:list atom) =
  match l with
  | [] -> false, l
  | hd::tl ->
    if hd = t then
      // These elements match, we remove them
      true, tl
    else (let b, res = trivial_cancel t tl in b, hd::res)

/// Call trivial_cancel on all elements of l1.
/// The first two lists returned are the remainders of l1 and l2.
/// The last two lists are the removed parts of l1 and l2, with
/// the additional invariant that they are equal
let rec trivial_cancels (l1 l2:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l1 with
  | [] -> [], l2, [], []
  | hd::tl ->
    let b, l2' = trivial_cancel hd l2 in
    let l1', l2', l1_del, l2_del = trivial_cancels tl l2' am in
    (if b then l1' else hd::l1'), l2',
    (if b then hd::l1_del else l1_del), (if b then hd::l2_del else l2_del)

// Control-flow exceptions: used to escape a try/with around `unify` so that
// a successful unification attempt is observed without being committed
exception Failed
exception Success

/// Helper to print the terms corresponding to the current list of atoms
let rec print_atoms (l:list atom) (am:amap term) : Tac string =
  match l with
  | [] -> ""
  | [hd] -> term_to_string (select hd am)
  | hd::tl -> term_to_string (select hd am) ^ " * " ^ print_atoms tl am

/// For a list of candidates l, count the number that can unify with t.
/// Does not try to unify with a uvar, this will be done at the very end.
/// Tries to unify with slprops with a different head symbol, it might
/// be an abbreviation.
/// Returns the last candidate that unified with [t], and the number of matches
let rec try_candidates (t:atom) (l:list atom) (am:amap term) : Tac (atom * int) =
  match l with
  | [] -> t, 0
  | hd::tl ->
    if is_uvar (select hd am) then (try_candidates t tl am)
    else
      // Encapsulate unify in a try/with to ensure unification is not actually performed
      let res = try if unify (select t am) (select hd am) then raise Success else raise Failed
                with | Success -> true | _ -> false in
      let t', n' = try_candidates t tl am in
      if res && hd <> t' then hd, 1 + n' else t', n'

/// Remove the given term from the list. Only to be called when
/// try_candidates succeeded
let rec remove_from_list (t:atom) (l:list atom) : Tac (list atom) =
  match l with
  | [] -> fail "atom in remove_from_list not found: should not happen"; []
  | hd::tl -> if t = hd then tl else hd::remove_from_list t tl

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration
let rec equivalent_lists_once (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l1 with
  | [] -> [], l2, l1_del, l2_del
  | hd::tl ->
    let t, n = try_candidates hd l2 am in
    if n = 1 then (
      // Exactly one candidate: commit to this match and record it in the
      // deleted (matched) lists
      let l2 = remove_from_list t l2 in
      equivalent_lists_once tl l2 (hd::l1_del) (t::l2_del) am
    ) else (
      // Either too many candidates for this scrutinee, or no candidate but the uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once tl l2 l1_del l2_del am in
      hd::rem1, rem2, l1'_del, l2'_del
    )

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates by iterating on l2.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration.
/// This is very close to equivalent_lists_once above, but helps making progress
/// when l1 contains syntactically equal candidates
let rec equivalent_lists_once_l2 (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom) =
  match l2 with
  | [] -> l1, [], l1_del, l2_del
  | hd::tl ->
    if is_uvar (select hd am) then
      // We do not try to match the vprop uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
      rem1, hd::rem2, l1'_del, l2'_del
    else (
      let t, n = try_candidates hd l1 am in
      if n = 1 then (
        let l1 = remove_from_list t l1 in
        equivalent_lists_once_l2 l1 tl (t::l1_del) (hd::l2_del) am
      ) else (
        // Either too many candidates for this scrutinee, or no candidate but the uvar
        let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
        rem1, hd::rem2, l1'_del, l2'_del
      )
    )

/// The term for the first atom of [l], or the unit literal when [l] is empty
let get_head (l:list atom) (am:amap term) : term = match l with
  | [] -> `()
  | hd::_ -> select hd am

/// Checks whether the list of atoms [l] only contains one unresolved uvar
let is_only_uvar (l:list atom) (am:amap term) : Tac bool =
  if List.Tot.Base.length l = 1 then is_uvar (select (List.Tot.Base.hd l) am)
  else false

/// Assumes that u is a uvar, checks that all variables in l can be unified with it.
/// Later in the tactic, the uvar will be unified to a star of l *) let rec try_unifying_remaining (l:list atom) (u:term) (am:amap term) : Tac unit = match l with | [] -> () | hd::tl -> try if unify u (select hd am) then raise Success else raise Failed with | Success -> try_unifying_remaining tl u am | _ -> fail ("could not find candidate for scrutinee " ^ term_to_string (select hd am)) /// Is SMT rewriting enabled for this binder let is_smt_binder (b:binder) : Tac bool = let l = b.attrs in not (List.Tot.isEmpty (filter (fun t -> is_fvar t (`%smt_fallback)) l)) /// Creates a new term, where all arguments where SMT rewriting is enabled have been replaced /// by fresh, unconstrained unification variables let rec new_args_for_smt_attrs (env:env) (l:list argv) (ty:typ) : Tac (list argv * list term) = let fresh_ghost_uvar ty = let e = cur_env () in ghost_uvar_env e ty in match l, inspect_unascribe ty with | (arg, aqualv)::tl, Tv_Arrow binder comp -> let needs_smt = is_smt_binder binder in let new_hd = if needs_smt then ( let arg_ty = tc env arg in let uvar = fresh_ghost_uvar arg_ty in unshelve uvar; flip (); (uvar, aqualv) ) else (arg, aqualv) in begin let ty2 = match inspect_comp comp with | C_Total ty2 -> ty2 | C_Eff _ eff_name ty2 _ _ -> if eff_name = ["Prims"; "Tot"] then ty2 else fail "computation type not supported in definition of slprops" | _ -> fail "computation type not supported in definition of slprops" in let tl_argv, tl_terms = new_args_for_smt_attrs env tl ty2 in new_hd::tl_argv, (if needs_smt then arg::tl_terms else tl_terms) end | [], Tv_FVar fv -> [], [] | _ -> fail "should not happen. Is an slprop partially applied?" 
/// Rewrites all terms in the context to enable SMT rewriting through the use of fresh, unconstrained unification variables
/// Rewrites the term for atom [a] in the amap: smt_fallback arguments become fresh
/// ghost uvars, and the replaced original arguments are accumulated alongside the map.
let rewrite_term_for_smt (env:env) (am:amap term * list term) (a:atom)
  : Tac (amap term * list term) =
  let am, prev_uvar_terms = am in
  let term = select a am in
  let hd, args = collect_app term in
  let t = tc env hd in
  let new_args, uvar_terms = new_args_for_smt_attrs env args t in
  let new_term = mk_app hd new_args in
  update a new_term am, List.Tot.append uvar_terms prev_uvar_terms

/// User-facing error message when the framing tactic fails
let fail_atoms (#a:Type) (l1 l2:list atom) (am:amap term) : Tac a =
  fail ("could not find a solution for unifying\n" ^ print_atoms l1 am ^ "\nand\n" ^ print_atoms l2 am)

/// Variant of equivalent_lists' below to be called once terms have been rewritten to allow SMT rewriting.
/// If unification succeeds and we have unicity of the solution, this tactic will succeed,
/// and ultimately create an SMT guard that the two terms are actually equal
/// [n] is a strictly decreasing progress measure (length of the remaining l1);
/// the returned boolean records whether a trailing empty frame is needed.
let rec equivalent_lists_fallback (n:nat) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false)
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can be therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then
        // Should always be smaller or equal to n
        // If it is equal, no progress was made.
        fail_atoms rem1 rem2 am
      else equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' am

/// Iterates over all terms in [l2] to prepare them for unification with SMT rewriting
let replace_smt_uvars (l1 l2:list atom) (am:amap term) : Tac (amap term * list term) =
  let env = cur_env () in
  fold_left (rewrite_term_for_smt env) (am, []) l2

/// Recursively calls equivalent_lists_once.
/// Stops when we're done with unification, or when we didn't make any progress
/// If we didn't make any progress, we have too many candidates for some terms.
/// Accumulates rewritings of l1 and l2 in l1_del and l2_del, with the invariant
/// that the two lists are unifiable at any point
/// The boolean indicates if there is a leftover empty frame
/// The final list collects the terms replaced by uvars when the SMT fallback fired;
/// it is empty whenever the fallback was not needed.
let rec equivalent_lists' (n:nat) (use_smt:bool) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool * list term) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false, [])
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can be therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true, []))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false, []
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then (
        // Try to make progress by matching non-uvars of l2 with candidates in l1
        let rem1, rem2, l1_del', l2_del' = equivalent_lists_once_l2 rem1 rem2 l1_del' l2_del' am in
        let n' = List.Tot.length rem1 in
        if n' >= n then (
          // Should always be smaller or equal to n
          // If it is equal, no progress was made.
          if use_smt then
            // SMT fallback is allowed
            let new_am, uvar_terms = replace_smt_uvars rem1 rem2 am in
            let l1_f, l2_f, b = equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' new_am in
            l1_f, l2_f, b, uvar_terms
          else fail_atoms rem1 rem2 am
        )
        else equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am
      )
      else equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am

/// Checks if term for atom t unifies with all uvars in l
let rec unifies_with_all_uvars (t:term) (l:list atom) (am:amap term) : Tac bool =
  match l with
  | [] -> true
  | hd::tl ->
    if unifies_with_all_uvars t tl am then (
      // Unified with tail, try this term
      let hd_t = select hd am in
      if is_uvar hd_t then (
        // The head term is a uvar, try unifying
        try if unify t hd_t then raise Success else raise Failed
        with | Success -> true | _ -> false
      ) else true // The uvar is not a head term, we do not need to try it
    ) else false

/// Puts all terms in l1 that cannot unify with the uvars in l2 at the top:
/// They need to be solved first
let rec most_restricted_at_top (l1 l2:list atom) (am:amap term) : Tac (list atom) =
  match l1 with
  | [] -> []
  | hd::tl ->
    if unifies_with_all_uvars (select hd am) l2 am
    then (most_restricted_at_top tl l2 am) `List.Tot.append` [hd]
    else hd::(most_restricted_at_top tl l2 am)

/// Core AC-unification tactic.
/// First remove all trivially equal terms, then try to decide equivalence. /// Assumes that l1 does not contain any vprop uvar. /// If it succeeds, returns permutations of l1, l2, and a boolean indicating /// if l2 has a trailing empty frame to be unified let equivalent_lists (use_smt:bool) (l1 l2:list atom) (am:amap term) : Tac (list atom * list atom * bool * list term) = let l1, l2, l1_del, l2_del = trivial_cancels l1 l2 am in let l1 = most_restricted_at_top l1 l2 am in let n = List.Tot.length l1 in let l1_del, l2_del, emp_frame, uvar_terms = equivalent_lists' n use_smt l1 l2 l1_del l2_del am in l1_del, l2_del, emp_frame, uvar_terms (* Helpers to relate the actual terms to their representation as a list of atoms *) open FStar.Reflection.V2.Derived.Lemmas let rec list_to_string (l:list term) : Tac string = match l with | [] -> "end" | hd::tl -> term_to_string hd ^ " " ^ list_to_string tl let rec mdenote_gen (#a:Type u#aa) (unit:a) (mult:a -> a -> a) (am:amap a) (e:exp) : a = match e with | Unit -> unit | Atom x -> select x am | Mult e1 e2 -> mult (mdenote_gen unit mult am e1) (mdenote_gen unit mult am e2) let rec xsdenote_gen (#a:Type) (unit:a) (mult:a -> a -> a) (am:amap a) (xs:list atom) : a = match xs with | [] -> unit | [x] -> select x am | x::xs' -> mult (select x am) (xsdenote_gen unit mult am xs') unfold let mdenote (#a:Type u#aa) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp) : a = let open FStar.Algebra.CommMonoid.Equiv in mdenote_gen (CM?.unit m) (CM?.mult m) am e unfold let xsdenote (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom) : a = let open FStar.Algebra.CommMonoid.Equiv in xsdenote_gen (CM?.unit m) (CM?.mult m) am xs // We reimplement List.Tot.Base.append because we need our tactic to normalize it, // but we don't want to normalize user-provided instances let rec my_append (#t: Type) (l1 l2: list t) : Pure (list t) (requires True) (ensures (fun res -> res == l1 `List.Tot.append` l2)) (decreases l1) = match l1 with | [] -> 
l2 | a :: q -> a :: my_append q l2 let rec flatten (e:exp) : list atom = match e with | Unit -> [] | Atom x -> [x] | Mult e1 e2 -> flatten e1 `my_append` flatten e2 let rec flatten_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs1 xs2:list atom) : Lemma (xsdenote eq m am (xs1 `my_append` xs2) `CE.EQ?.eq eq` CE.CM?.mult m (xsdenote eq m am xs1) (xsdenote eq m am xs2)) = let open FStar.Algebra.CommMonoid.Equiv in match xs1 with | [] -> CM?.identity m (xsdenote eq m am xs2); EQ?.symmetry eq (CM?.mult m (CM?.unit m) (xsdenote eq m am xs2)) (xsdenote eq m am xs2) | [x] -> ( if (Nil? xs2) then (right_identity eq m (select x am); EQ?.symmetry eq (CM?.mult m (select x am) (CM?.unit m)) (select x am)) else EQ?.reflexivity eq (CM?.mult m (xsdenote eq m am [x]) (xsdenote eq m am xs2))) | x::xs1' -> flatten_correct_aux eq m am xs1' xs2; EQ?.reflexivity eq (select x am); CM?.congruence m (select x am) (xsdenote eq m am (xs1' `my_append` xs2)) (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)); CM?.associativity m (select x am) (xsdenote eq m am xs1') (xsdenote eq m am xs2); EQ?.symmetry eq (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2)) (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2))); EQ?.transitivity eq (CM?.mult m (select x am) (xsdenote eq m am (xs1' `my_append` xs2))) (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2))) (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2)) let rec flatten_correct (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp) : Lemma (mdenote eq m am e `CE.EQ?.eq eq` xsdenote eq m am (flatten e)) = let open FStar.Algebra.CommMonoid.Equiv in match e with | Unit -> EQ?.reflexivity eq (CM?.unit m) | Atom x -> EQ?.reflexivity eq (select x am) | Mult e1 e2 -> flatten_correct_aux eq m am (flatten e1) (flatten e2); EQ?.symmetry eq (xsdenote eq m am (flatten e1 
`my_append` flatten e2)) (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2))); flatten_correct eq m am e1; flatten_correct eq m am e2; CM?.congruence m (mdenote eq m am e1) (mdenote eq m am e2) (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)); EQ?.transitivity eq (CM?.mult m (mdenote eq m am e1) (mdenote eq m am e2)) (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2))) (xsdenote eq m am (flatten e1 `my_append` flatten e2)) let monoid_reflect (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e1 e2:exp) (_ : squash (xsdenote eq m am (flatten e1) `CE.EQ?.eq eq` xsdenote eq m am (flatten e2))) : squash (mdenote eq m am e1 `CE.EQ?.eq eq` mdenote eq m am e2) = flatten_correct eq m am e1; flatten_correct eq m am e2; CE.EQ?.symmetry eq (mdenote eq m am e2) (xsdenote eq m am (flatten e2)); CE.EQ?.transitivity eq (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)) (mdenote eq m am e2); CE.EQ?.transitivity eq (mdenote eq m am e1) (xsdenote eq m am (flatten e1)) (mdenote eq m am e2) // Here we sort the variable numbers // We reimplement List.Tot.Base.sortWith because we need our tactic to normalize it, // but we don't want to normalize user-provided instances let rec my_partition (#a: Type) (f: (a -> Tot bool)) (l: list a) : Pure (list a & list a) (requires True) (ensures (fun res -> res == List.Tot.partition f l)) = match l with | [] -> [], [] | hd::tl -> let l1, l2 = my_partition f tl in if f hd then hd::l1, l2 else l1, hd::l2 let rec partition_ext (#a: Type) (f1 f2: (a -> Tot bool)) (l: list a) : Lemma (requires (forall x . 
f1 x == f2 x)) (ensures (List.Tot.partition f1 l == List.Tot.partition f2 l)) = match l with | [] -> () | hd::tl -> partition_ext f1 f2 tl let my_bool_of_compare (#a: Type) (f: a -> a -> Tot int) (x: a) (y: a) : Tot bool = f x y < 0 let rec my_sortWith (#a: Type) (f: (a -> a -> Tot int)) (l:list a) : Pure (list a) (requires True) (ensures (fun res -> res == List.Tot.sortWith f l)) (decreases (List.Tot.length l)) = match l with | [] -> [] | pivot::tl -> let hi, lo = my_partition (my_bool_of_compare f pivot) tl in partition_ext (my_bool_of_compare f pivot) (List.Tot.bool_of_compare f pivot) tl; List.Tot.partition_length (List.Tot.bool_of_compare f pivot) tl; my_append (my_sortWith f lo) (pivot::my_sortWith f hi) let rec sortWith_ext (#a: Type) (f1 f2: (a -> a -> Tot int)) (l: list a) : Lemma (requires (forall x y . f1 x y == f2 x y)) (ensures (List.Tot.sortWith f1 l == List.Tot.sortWith f2 l)) (decreases (List.Tot.length l)) = match l with | [] -> () | pivot::tl -> partition_ext (List.Tot.bool_of_compare f1 pivot) (List.Tot.bool_of_compare f2 pivot) tl; List.Tot.partition_length (List.Tot.bool_of_compare f1 pivot) tl; let hi, lo = List.Tot.partition (List.Tot.bool_of_compare f1 pivot) tl in sortWith_ext f1 f2 lo; sortWith_ext f1 f2 hi let permute = list atom -> list atom let my_compare_of_bool (#a:eqtype) (rel: a -> a -> Tot bool) (x: a) (y: a) : Tot int = if x `rel` y then -1 else if x = y then 0 else 1 let sort : permute = my_sortWith #int (my_compare_of_bool (<)) #push-options "--fuel 1 --ifuel 1" let lemma_xsdenote_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (hd:atom) (tl:list atom) : Lemma (xsdenote eq m am (hd::tl) `CE.EQ?.eq eq` (CE.CM?.mult m (select hd am) (xsdenote eq m am tl))) = let open FStar.Algebra.CommMonoid.Equiv in match tl with | [] -> assert (xsdenote eq m am (hd::tl) == select hd am); CM?.identity m (select hd am); EQ?.symmetry eq (CM?.unit m `CM?.mult m` select hd am) (select hd am); CM?.commutativity m (CM?.unit m) (select hd am); 
EQ?.transitivity eq (xsdenote eq m am (hd::tl)) (CM?.unit m `CM?.mult m` select hd am) (CM?.mult m (select hd am) (xsdenote eq m am tl)) | _ -> EQ?.reflexivity eq (xsdenote eq m am (hd::tl)) let rec partition_equiv (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (pivot:atom) (q:list atom) : Lemma (let open FStar.List.Tot.Base in let hi, lo = partition (bool_of_compare (compare_of_bool (<)) pivot) q in CE.EQ?.eq eq (xsdenote eq m am hi `CE.CM?.mult m` xsdenote eq m am lo) (xsdenote eq m am q)) = let open FStar.Algebra.CommMonoid.Equiv in let open FStar.List.Tot.Base in let f = bool_of_compare (compare_of_bool (<)) pivot in let hi, lo = partition f q in match q with | [] -> CM?.identity m (xsdenote eq m am hi) | hd::tl -> let l1, l2 = partition f tl in partition_equiv eq m am pivot tl; assert (EQ?.eq eq (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2) (xsdenote eq m am tl)); EQ?.reflexivity eq (xsdenote eq m am l1); EQ?.reflexivity eq (xsdenote eq m am l2); EQ?.reflexivity eq (xsdenote eq m am hi); EQ?.reflexivity eq (xsdenote eq m am lo); if f hd then begin assert (hi == hd::l1 /\ lo == l2); lemma_xsdenote_aux eq m am hd l1; CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo) (select hd am `CM?.mult m` xsdenote eq m am l1) (xsdenote eq m am l2); CM?.associativity m (select hd am) (xsdenote eq m am l1) (xsdenote eq m am l2); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) ((select hd am `CM?.mult m` xsdenote eq m am l1) `CM?.mult m` xsdenote eq m am l2) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)); EQ?.reflexivity eq (select hd am); CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2) (select hd am) (xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)) (select hd am `CM?.mult m` xsdenote eq m am tl); lemma_xsdenote_aux eq m 
am hd tl; EQ?.symmetry eq (xsdenote eq m am (hd::tl)) (select hd am `CM?.mult m` xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` xsdenote eq m am tl) (xsdenote eq m am (hd::tl)) end else begin assert (hi == l1 /\ lo == hd::l2); lemma_xsdenote_aux eq m am hd l2; CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo) (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2); CM?.commutativity m (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (xsdenote eq m am l1 `CM?.mult m` (select hd am `CM?.mult m` xsdenote eq m am l2)) ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1); CM?.associativity m (select hd am) (xsdenote eq m am l2) (xsdenote eq m am l1); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1) (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1)); CM?.commutativity m (xsdenote eq m am l2) (xsdenote eq m am l1); EQ?.reflexivity eq (select hd am); CM?.congruence m (select hd am) (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1) (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1)) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)); CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2) (select hd am) (xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)) (select hd am `CM?.mult m` xsdenote eq m am tl); lemma_xsdenote_aux eq m am hd tl; 
EQ?.symmetry eq (xsdenote eq m am (hd::tl)) (select hd am `CM?.mult m` xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` xsdenote eq m am tl) (xsdenote eq m am (hd::tl)) end let rec sort_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom) : Lemma (requires True) (ensures xsdenote eq m am xs `CE.EQ?.eq eq` xsdenote eq m am (sort xs)) (decreases (FStar.List.Tot.Base.length xs)) = let open FStar.Algebra.CommMonoid.Equiv in match xs with | [] -> EQ?.reflexivity eq (xsdenote eq m am []) | pivot::q -> let sort0 : permute = List.Tot.sortWith #int (List.Tot.compare_of_bool (<)) in let sort_eq (l: list atom) : Lemma (sort l == sort0 l) [SMTPat (sort l)] = sortWith_ext (my_compare_of_bool (<)) (List.Tot.compare_of_bool (<)) l in let open FStar.List.Tot.Base in let f:int -> int -> int = compare_of_bool (<) in let hi, lo = partition (bool_of_compare f pivot) q in flatten_correct_aux eq m am (sort lo) (pivot::sort hi); assert (xsdenote eq m am (sort xs) `EQ?.eq eq` CM?.mult m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi))); lemma_xsdenote_aux eq m am pivot (sort hi); EQ?.reflexivity eq (xsdenote eq m am (sort lo)); CM?.congruence m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi)) (xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (xsdenote eq m am (sort lo) `CM?.mult m` xsdenote eq m am (pivot::sort hi)) (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)))); CM?.commutativity m (xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)); CM?.associativity m (select pivot am) (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo)); 
EQ?.transitivity eq (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))) ((select pivot am `CM?.mult m` xsdenote eq m am (sort hi)) `CM?.mult m` xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)))); partition_length (bool_of_compare f pivot) q; sort_correct_aux eq m am hi; sort_correct_aux eq m am lo; EQ?.symmetry eq (xsdenote eq m am lo) (xsdenote eq m am (sort lo)); EQ?.symmetry eq (xsdenote eq m am hi) (xsdenote eq m am (sort hi)); CM?.congruence m (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo)) (xsdenote eq m am hi) (xsdenote eq m am lo); assert (EQ?.eq eq (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)); EQ?.reflexivity eq (select pivot am); CM?.congruence m (select pivot am) (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)) (select pivot am) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))) (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo))); partition_equiv eq m am pivot q; CM?.congruence m (select pivot am) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select pivot am) (xsdenote eq m am q); EQ?.transitivity eq 
(xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)) (select pivot am `CM?.mult m` (xsdenote eq m am q)); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am q))); lemma_xsdenote_aux eq m am pivot q; EQ?.symmetry eq (xsdenote eq m am (pivot::q)) (select pivot am `CM?.mult m` (xsdenote eq m am q)); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am q)) (xsdenote eq m am xs); EQ?.symmetry eq (xsdenote eq m am (sort xs)) (xsdenote eq m am xs) #pop-options #push-options "--fuel 0 --ifuel 0" (* Lemmas to be called after a permutation compatible with AC-unification was found *) let smt_reflexivity (#a:Type) (eq:CE.equiv a) (x y:a) : Lemma (requires x == y) (ensures CE.EQ?.eq eq x y) = CE.EQ?.reflexivity eq x let identity_left_smt (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a) : Lemma (requires x == y) (ensures CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) y)) = CE.CM?.identity m x; CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x let identity_left (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x:a) : Lemma (CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) x)) = CE.CM?.identity m x; CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x let identity_right_diff (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a) : Lemma (requires CE.EQ?.eq eq x y) (ensures CE.EQ?.eq eq (CE.CM?.mult m x (CE.CM?.unit m)) y) = CE.right_identity eq m x; CE.EQ?.transitivity eq (CE.CM?.mult m x (CE.CM?.unit m)) x y /// Dismiss possible vprops goals that might have been created by lemma application. 
/// These vprops will be instantiated at a later stage; else, Meta-F* will raise an error
/// Repeatedly dismisses goals until the current goal is a squash (the "real" goal).
let rec dismiss_slprops () : Tac unit =
  match term_as_formula' (cur_goal ()) with
  | App t _ -> if is_fvar t (`%squash) then () else (dismiss(); dismiss_slprops ())
  | _ -> dismiss(); dismiss_slprops ()

/// Recursively removing trailing empty assertions
/// Applies [identity_right_diff] n times to peel off n trailing units,
/// then closes with reflexivity.
let rec n_identity_left (n:int) (eq m:term) : Tac unit =
  if n = 0 then (
    apply_lemma (`(CE.EQ?.reflexivity (`#eq)));
    // Cleaning up, in case a uvar has been generated here. It'll be solved later
    set_goals [])
  else (
    apply_lemma (`identity_right_diff (`#eq) (`#m));
    // Drop the slprops generated, they will be solved later
    dismiss_slprops ();
    n_identity_left (n-1) eq m
  )

/// Helper lemma: If two vprops (as represented by lists of atoms) are equivalent, then their canonical forms
/// (corresponding to applying the sort function on atoms) are equivalent
let equivalent_sorted (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (l1 l2 l1' l2':list atom)
  : Lemma (requires
             sort l1 == sort l1' /\ sort l2 == sort l2' /\
             xsdenote eq m am l1 `CE.EQ?.eq eq` xsdenote eq m am l2)
          (ensures xsdenote eq m am l1' `CE.EQ?.eq eq` xsdenote eq m am l2') =
  let open FStar.Algebra.CommMonoid.Equiv in
  // Chain l1' ~ sort l1' = sort l1 ~ l1 ~ l2 ~ sort l2 = sort l2' ~ l2'
  sort_correct_aux eq m am l1';
  sort_correct_aux eq m am l1;
  EQ?.symmetry eq (xsdenote eq m am l1) (xsdenote eq m am (sort l1));
  EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am (sort l1')) (xsdenote eq m am l1);
  EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am l1) (xsdenote eq m am l2);
  sort_correct_aux eq m am l2;
  EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am l2) (xsdenote eq m am (sort l2));
  sort_correct_aux eq m am l2';
  EQ?.symmetry eq (xsdenote eq m am l2') (xsdenote eq m am (sort l2'));
  EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am (sort l2)) (xsdenote eq m am l2')
#pop-options

/// Finds the position of first occurrence of x in xs.
/// This is now specialized to terms and their funny term_eq_old.
let rec where_aux (n:nat) (x:term) (xs:list term) :
    Tac (option nat) (decreases xs) =
  match xs with
  | [] -> None
  | x'::xs' -> if term_eq_old x x' then Some n else where_aux (n+1) x xs'

/// Position of the first occurrence of a term in a list, starting at index 0
let where = where_aux 0

/// Reflects term [t] into an atom: reuses its existing atom if [t] is already
/// in [ts], otherwise allocates a fresh atom and extends both [ts] and [am].
let fatom (t:term) (ts:list term) (am:amap term) : Tac (exp * list term * amap term) =
  match where t ts with
  | Some v -> (Atom v, ts, am)
  | None ->
    let vfresh = List.Tot.Base.length ts in
    let t = norm_term [iota; zeta] t in
    (Atom vfresh, ts `List.Tot.append` [t], update vfresh t am)

/// Transforming a term into the corresponding list of atoms
/// If the atomic terms were already present in the map [am], then
/// they correspond to the same atoms
/// This expects that mult, unit, and t have already been normalized
let rec reification_aux (ts:list term) (am:amap term) (mult unit t : term)
  : Tac (exp * list term * amap term) =
  let hd, tl = collect_app_ref t in
  match inspect_unascribe hd, List.Tot.Base.list_unref tl with
  | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] ->
    // A binary application of the monoid's mult is reflected structurally;
    // anything else becomes an opaque atom
    if term_eq_old (pack (Tv_FVar fv)) mult
    then (let (e1, ts, am) = reification_aux ts am mult unit t1 in
          let (e2, ts, am) = reification_aux ts am mult unit t2 in
          (Mult e1 e2, ts, am))
    else fatom t ts am
  | _, _ ->
    if term_eq_old t unit
    then (Unit, ts, am)
    else fatom t ts am

/// Performs the required normalization before calling the function above
let reification (eq: term) (m: term) (ts:list term) (am:amap term) (t:term)
  : Tac (exp * list term * amap term) =
  let mult = norm_term [iota; zeta; delta] (`CE.CM?.mult (`#m)) in
  let unit = norm_term [iota; zeta; delta] (`CE.CM?.unit (`#m)) in
  let t = norm_term [iota; zeta] t in
  reification_aux ts am mult unit t

/// Meta-F* internal: Transforms the atom map into a term
let rec convert_map (m : list (atom * term)) : term =
  match m with
  | [] -> `[]
  | (a, t)::ps ->
    let a = pack (Tv_Const (C_Int a)) in
    (* let t = norm_term [delta] t in *)
    `((`#a, (`#t)) :: (`#(convert_map ps)))

/// `am` is an amap (basically a list) of terms, each representing a value
/// of type `a` (whichever we are canonicalizing). This function converts
/// `am` into a single `term` of type `amap a`, suitable to call `mdenote` with *)
let convert_am (am : amap term) : term =
  let (map, def) = am in
  (* let def = norm_term [delta] def in *)
  `( (`#(convert_map map), `#def) )

/// Transforms a term representation into a term through quotation
let rec quote_exp (e:exp) : term =
  match e with
  | Unit -> (`Unit)
  | Mult e1 e2 -> (`Mult (`#(quote_exp e1)) (`#(quote_exp e2)))
  | Atom n -> let nt = pack (Tv_Const (C_Int n)) in
              (`Atom (`#nt))

/// Quotes a list of atoms into a term representing that list
let rec quote_atoms (l:list atom) = match l with
  | [] -> `[]
  | hd::tl -> let nt = pack (Tv_Const (C_Int hd)) in
              (`Cons (`#nt) (`#(quote_atoms tl)))

/// Some internal normalization steps to make reflection of vprops into atoms and atom permutation go smoothly.
/// We reimplemented sorting/list functions to normalize our uses without normalizing those introduced by the user.
let normal_tac_steps = [primops; iota; zeta; delta_only [
  `%mdenote; `%select; `%my_assoc; `%my_append;
  `%flatten; `%sort; `%my_sortWith; `%my_partition;
  `%my_bool_of_compare; `%my_compare_of_bool; `%fst; `%__proj__Mktuple2__item___1;
  `%snd; `%__proj__Mktuple2__item___2;
  `%CE.__proj__CM__item__unit; `%CE.__proj__CM__item__mult;
  `%rm]]

/// The normalization function, using the above normalization steps
let normal_tac (#a:Type) (x:a) : a = FStar.Pervasives.norm normal_tac_steps x

/// Helper lemma to establish relation between normalized and initial values
let normal_elim (x:Type0) : Lemma
  (requires x)
  (ensures normal_tac x)
  = ()

// Carries the result of AC-unification out of the engine through an exception
exception Result of list atom * list atom * bool * list term

/// F* equalities are typed, but the generated type sometimes is a unification variable.
/// This helper ensures that such unification variables are not left unresolved, which would lead to an error let close_equality_typ' (t:term) : Tac unit = let f = term_as_formula' t in match f with | Comp (Eq (Some u)) l _ -> if is_uvar u then (unshelve u; exact_with_ref (tc (cur_env()) l)) | _ -> () /// Recursively closing equality types in a given term (usually a unification constraint) let close_equality_typ (t:term) : Tac unit = visit_tm close_equality_typ' t /// Core unification tactic. /// Transforms terms into their atom representations, /// Tries to find a solution to AC-unification, and if so, /// soundly permutes the atom representations before calling the unifier /// to check the validity of the provided solution. /// In the case where SMT rewriting was needed, equalities abduction is performed by instantiating the /// abduction prop unification variable with the corresponding guard /// 09/24: /// /// The tactic internally builds a map from atoms to terms /// and uses the map for reflecting the goal to atoms representation /// During reflection, the tactics engine typechecks the amap, and hence all /// the terms again /// This typechecking of terms is unnecessary, since the terms are coming /// from the goal, and hence are already well-typed /// Worse, re-typechecking them may generate a lot of SMT queries /// And even worse, the SMT queries are discharged in the static context, /// requiring various workarounds (e.g. squash variables for if conditions etc.) /// /// To fix this, we now "name" the terms and use the amap with names /// /// Read through the canon_l_r function for how we do this /// The following three lemmas are helpers to manipulate the goal in canon_l_r [@@ no_subtyping] let inst_bv (#a:Type) (#p:a -> Type0) (#q:Type0) (x:a) (_:squash (p x ==> q)) : Lemma ((forall (x:a). 
p x) ==> q) = () let modus_ponens (#p #q:Type0) (_:squash p) : Lemma ((p ==> q) ==> q) = () let cut (p q:Type0) : Lemma (requires p /\ (p ==> q)) (ensures q) = () let and_true (p: Type0) : Lemma (requires (p /\ (p ==> True))) (ensures p) = () let solve_implies_true (p: Type0) : Lemma (p ==> True) = () // This exception is raised for failures that should not be considered // hard but should allow postponing the goal instead exception Postpone of string (* NOTE! Redefining boolean disjunction to *not* be short-circuiting, since we cannot use an effectful result as argument of Prims.op_BarBar *) private let bor = op_BarBar private let is_and (t:term) : bool = is_any_fvar t [`%(/\); `%prop_and] private let is_squash (t:term) : bool = is_any_fvar t [`%squash; `%auto_squash] private let is_star (t:term) : bool = is_any_fvar t [`%star; `%VStar] private let is_star_or_unit (t:term) : bool = is_any_fvar t [`%star; `%VStar; `%VUnit] let rec unify_pr_with_true (pr: term) : Tac unit = let hd, tl = collect_app pr in if is_and hd then match tl with | [pr_l, _; pr_r, _] -> unify_pr_with_true pr_l; unify_pr_with_true pr_r | _ -> fail "unify_pr_with_true: ill-formed /\\" else match inspect_unascribe hd with | Tv_Uvar _ _ -> if unify pr (`true_p) then () else begin fail "unify_pr_with_true: could not unify SMT prop with True" end | _ -> if List.Tot.length (free_uvars pr) = 0 then () else // postpone the goal instead of failing hard, to allow for other goals to solve those uvars raise (Postpone "unify_pr_with_true: some uvars are still there") let elim_and_l_squash (#a #b: Type0) (#goal: Type0) (f: (a -> Tot (squash goal))) (h: (a /\ b)) : Tot (squash goal) = let f' (x: squash a) : Tot (squash goal) = FStar.Squash.bind_squash x f in let elim_impl (x: squash (a /\ b)) : Tot (squash a) = () in f' (elim_impl (FStar.Squash.return_squash h)) let elim_and_r_squash (#a #b: Type0) (#goal: Type0) (f: (b -> Tot (squash goal))) (h: (a /\ b)) : Tot (squash goal) = let f' (x: squash b) : Tot 
(squash goal) = FStar.Squash.bind_squash x f in let elim_impl (x: squash (a /\ b)) : Tot (squash b) = () in f' (elim_impl (FStar.Squash.return_squash h)) let _return_squash (#a: Type) () (x: a) : Tot (squash a) = FStar.Squash.return_squash x let rec set_abduction_variable_term (pr: term) : Tac term = let hd, tl = collect_app pr in if is_and hd then match tl with | (pr_l, Q_Explicit) :: (pr_r, Q_Explicit) :: [] -> if List.Tot.length (free_uvars pr_r) = 0 then let arg = set_abduction_variable_term pr_l in mk_app (`elim_and_l_squash) [arg, Q_Explicit] else if List.Tot.length (free_uvars pr_l) = 0 then let arg = set_abduction_variable_term pr_r in mk_app (`elim_and_r_squash) [arg, Q_Explicit] else // postpone the goal instead of failing hard, to allow for other goals to solve those uvars raise (Postpone "set_abduction_variable_term: there are still uvars on both sides of l_and") | _ -> fail "set_abduction_variable: ill-formed /\\" else match inspect hd with | Tv_Uvar _ _ -> mk_app (`_return_squash) [`(), Q_Explicit] | _ -> fail "set_abduction_variable: cannot unify" let set_abduction_variable () : Tac unit = let g = cur_goal () in match inspect_unascribe g with | Tv_Arrow b _ -> let pr = b.sort in exact (set_abduction_variable_term pr) | _ -> fail "Not an arrow goal" let canon_l_r (use_smt:bool) (carrier_t:term) //e.g. 
vprop (eq:term) (m:term) (pr pr_bind:term) (lhs rel rhs:term) : Tac unit = let m_unit = norm_term [iota; zeta; delta] (`(CE.CM?.unit (`#m))) in let m_mult = norm_term [iota; zeta; delta] (`(CE.CM?.mult (`#m))) in let am = const m_unit in (* empty map *) let (r1_raw, ts, am) = reification eq m [] am lhs in let (r2_raw, _, am) = reification eq m ts am rhs in // Encapsulating this in a try/with to avoid spawning uvars for smt_fallback let l1_raw, l2_raw, emp_frame, uvar_terms = try let res = equivalent_lists use_smt (flatten r1_raw) (flatten r2_raw) am in raise (Result res) with | TacticFailure m -> fail m | Result res -> res | _ -> fail "uncaught exception in equivalent_lists" in //So now we have: // am : amap mapping atoms to terms in lhs and rhs // r1_raw : an expression in the atoms language for lhs // r2_raw : an expression in the atoms language for rhs // l1_raw : sorted list of atoms in lhs // l2_raw : sorted list of atoms in rhs // //In particular, r1_raw and r2_raw capture lhs and rhs structurally // (i.e. same associativity, emp, etc.) 
// //Whereas l1_raw and l2_raw are "canonical" representations of lhs and rhs // (vis xsdenote) //Build an amap where atoms are mapped to names //The type of these names is carrier_t passed by the caller let am_bv : list (atom & namedv & typ) = mapi (fun i (a, _) -> let x = fresh_namedv_named ("x" ^ (string_of_int i)) in (a, x, carrier_t)) (fst am) in let am_bv_term : amap term = map (fun (a, bv, _sort) -> a, pack (Tv_Var bv)) am_bv, snd am in let mdenote_tm (e:exp) : term = mdenote_gen m_unit (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)]) am_bv_term e in let xsdenote_tm (l:list atom) : term = xsdenote_gen m_unit (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)]) am_bv_term l in //Get the named representations of lhs, rhs, and their respective sorted versions let lhs_named = mdenote_tm r1_raw in let rhs_named = mdenote_tm r2_raw in let sorted_lhs_named = xsdenote_tm l1_raw in let sorted_rhs_named = xsdenote_tm l2_raw in //We now build an auxiliary goal of the form: // // forall xs. 
(sorted_lhs_named `rel` sorted_rhs_names) ==> (lhs_names `rel` rhs_named) // // where xs are the fresh names that we introduced earlier let mk_rel (l r:term) : term = mk_app rel [(l, Q_Explicit); (r, Q_Explicit)] in let imp_rhs = mk_rel lhs_named rhs_named in let imp_lhs = mk_rel sorted_lhs_named sorted_rhs_named in let imp = mk_app (pack (Tv_FVar (pack_fv imp_qn))) [(imp_lhs, Q_Explicit); (imp_rhs, Q_Explicit)] in //fold over names and quantify over them let aux_goal = fold_right (fun (_, nv, sort) t -> let nvv = inspect_namedv nv in let b = { ppname = nvv.ppname; uniq = nvv.uniq; qual = Q_Explicit; attrs = []; sort = sort; } in let _, t = close_term b t in let t = pack (Tv_Abs b t) in mk_app (pack (Tv_FVar (pack_fv forall_qn))) [t, Q_Explicit]) am_bv imp in //Introduce a cut with the auxiliary goal apply_lemma (`cut (`#aux_goal)); //After the cut, the goal looks like: A /\ (A ==> G) // where A is the auxiliary goal and G is the original goal (lhs `rel` rhs) split (); //Solving A: focus (fun _ -> //The proof follows a similar structure as before naming was introduced // //Except that this time, the amap is in terms of names, // and hence its typechecking is faster and (hopefully) no SMT involved //Open the forall binders in A, and use the fresh names to build an amap let am = fold_left (fun am (a, _, _sort) -> let b = forall_intro () in let v = binding_to_namedv b in (a, pack (Tv_Var v))::am) [] am_bv, snd am in //Introduce the lhs of implication let b = implies_intro () in //Now the proof is the plain old canon proof let am = convert_am am in let r1 = quote_exp r1_raw in let r2 = quote_exp r2_raw in change_sq (`(normal_tac (mdenote (`#eq) (`#m) (`#am) (`#r1) `CE.EQ?.eq (`#eq)` mdenote (`#eq) (`#m) (`#am) (`#r2)))); apply_lemma (`normal_elim); apply (`monoid_reflect ); let l1 = quote_atoms l1_raw in let l2 = quote_atoms l2_raw in apply_lemma (`equivalent_sorted (`#eq) (`#m) (`#am) (`#l1) (`#l2)); if List.Tot.length (goals ()) = 0 then () else begin norm [primops; 
iota; zeta; delta_only [`%xsdenote; `%select; `%my_assoc; `%my_append; `%flatten; `%sort; `%my_sortWith; `%my_partition; `%my_bool_of_compare; `%my_compare_of_bool; `%fst; `%__proj__Mktuple2__item___1; `%snd; `%__proj__Mktuple2__item___2; `%CE.__proj__CM__item__unit; `%CE.__proj__CM__item__mult; `%rm; `%CE.__proj__EQ__item__eq; `%req; `%star;] ]; //The goal is of the form G1 /\ G2 /\ G3, as in the requires of equivalent_sorted split (); split (); //Solve G1 and G2 by trefl trefl (); trefl (); //G3 is the lhs of the implication in the auxiliary goal // that we have in our assumptions via b apply (`FStar.Squash.return_squash); exact (binding_to_term b) end); dismiss_slprops(); //Our goal now is A ==> G (where G is the original goal (lhs `rel` rhs)) //Open the forall binders ignore (repeatn (List.Tot.length am_bv) (fun _ -> apply_lemma (`inst_bv))); //And apply modus ponens apply_lemma (`modus_ponens); //Now our goal is sorted_lhs_named `rel` sorted_rhs_named // where the names are replaced with fresh uvars (from the repeatn call above) //So we just trefl match uvar_terms with | [] -> // Closing unneeded prop uvar focus (fun _ -> try apply_lemma (`and_true); split (); if emp_frame then apply_lemma (`identity_left (`#eq) (`#m)) else apply_lemma (`(CE.EQ?.reflexivity (`#eq))); unify_pr_with_true pr; // MUST be done AFTER identity_left/reflexivity, which can unify other uvars apply_lemma (`solve_implies_true) with | TacticFailure msg -> fail ("Cannot unify pr with true: " ^ msg) | e -> raise e ) | l -> if emp_frame then ( apply_lemma (`identity_left_smt (`#eq) (`#m)) ) else ( apply_lemma (`smt_reflexivity (`#eq)) ); t_trefl true; close_equality_typ (cur_goal()); revert (); set_abduction_variable () /// Wrapper around the tactic above /// The constraint should be of the shape `squash (equiv lhs rhs)` let canon_monoid (use_smt:bool) (carrier_t:term) (eq m:term) (pr pr_bind:term) : Tac unit = norm [iota; zeta]; let t = cur_goal () in // removing top-level squash application 
let sq, rel_xy = collect_app_ref t in // unpacking the application of the equivalence relation (lhs `EQ?.eq eq` rhs) (match rel_xy with | [(rel_xy,_)] -> ( let open FStar.List.Tot.Base in let rel, xy = collect_app_ref rel_xy in if (length xy >= 2) then ( match index xy (length xy - 2) , index xy (length xy - 1) with | (lhs, Q_Explicit) , (rhs, Q_Explicit) -> canon_l_r use_smt carrier_t eq m pr pr_bind lhs rel rhs | _ -> fail "Goal should have been an application of a binary relation to 2 explicit arguments" ) else ( fail "Goal should have been an application of a binary relation to n implicit and 2 explicit arguments" ) ) | _ -> fail "Goal should be squash applied to a binary relation") /// Instantiation of the generic AC-unification tactic with the vprop commutative monoid let canon' (use_smt:bool) (pr:term) (pr_bind:term) : Tac unit = canon_monoid use_smt (pack (Tv_FVar (pack_fv [`%vprop]))) (`req) (`rm) pr pr_bind /// Counts the number of unification variables corresponding to vprops in the term [t] let rec slterm_nbr_uvars (t:term) : Tac int = match inspect_unascribe t with | Tv_Uvar _ _ -> 1 | Tv_App _ _ -> let hd, args = collect_app t in if is_star_or_unit hd then // Only count the number of unresolved slprops, not program implicits slterm_nbr_uvars_argv args else if is_uvar hd then 1 else 0 | Tv_Abs _ t -> slterm_nbr_uvars t | _ -> 0 and slterm_nbr_uvars_argv (args: list argv) : Tac int = fold_left (fun n (x, _) -> n + slterm_nbr_uvars x) 0 args let guard_vprop (v: vprop) : Tot vprop = v let rec all_guards_solved (t: term) : Tac bool = match inspect_unascribe t with | Tv_Abs _ t -> all_guards_solved t | Tv_App _ _ -> let hd, args = collect_app t in if hd `is_fvar` (`%guard_vprop) then slterm_nbr_uvars_argv args = 0 else if not (all_guards_solved hd) then false else List.Tot.fold_left (fun (tac: (unit -> Tac bool)) (tm, _) -> let f () : Tac bool = if all_guards_solved tm then tac () else false in f ) (let f () : Tac bool = true in f) args () | _ -> true let 
unfold_guard () : Tac bool = if all_guards_solved (cur_goal ()) then begin focus (fun _ -> norm [delta_only [(`%guard_vprop)]]); true end else false let rec term_is_uvar (t: term) (i: int) : Tac bool = match inspect t with | Tv_Uvar i' _ -> i = i' | Tv_App _ _ -> let hd, args = collect_app t in term_is_uvar hd i | _ -> false val solve_can_be_split_for : string -> Tot unit val solve_can_be_split_lookup : unit // FIXME: src/reflection/FStar.Reflection.Basic.lookup_attr only supports fvar attributes, so we cannot directly look up for (solve_can_be_split_for blabla), we need a nullary attribute to use with lookup_attr let rec dismiss_all_but_last' (l: list goal) : Tac unit = match l with | [] | [_] -> set_goals l | _ :: q -> dismiss_all_but_last' q let dismiss_all_but_last () : Tac unit = dismiss_all_but_last' (goals ()) let rec dismiss_non_squash_goals' (keep:list goal) (goals:list goal) : Tac unit = match goals with | [] -> set_goals (List.Tot.rev keep) | hd :: tl -> let f = term_as_formula' (goal_type hd) in match f with | App hs _ -> if is_squash hs then dismiss_non_squash_goals' (hd::keep) tl else dismiss_non_squash_goals' keep tl | _ -> dismiss_non_squash_goals' keep tl let dismiss_non_squash_goals () = let g = goals () in dismiss_non_squash_goals' [] g let rec term_mem (te: term) (l: list term) : Tac bool = match l with | [] -> false | t' :: q -> if te `term_eq_old` t' then true else term_mem te q let rec lookup_by_term_attr' (attr: term) (e: env) (found: list fv) (l: list fv) : Tac (list fv) = match l with | [] -> List.Tot.rev found | f :: q -> let n = inspect_fv f in begin match lookup_typ e n with | None -> lookup_by_term_attr' attr e found q | Some se -> let found' = if attr `term_mem` sigelt_attrs se then f :: found else found in lookup_by_term_attr' attr e found' q end let lookup_by_term_attr (label_attr: term) (attr: term) : Tac (list fv) = let e = cur_env () in let candidates = lookup_attr label_attr e in lookup_by_term_attr' attr e [] candidates let rec 
bring_last_goal_on_top' (others: list goal) (goals: list goal) : Tac unit = match goals with | [] -> set_goals (List.Tot.rev others) | last :: [] -> set_goals (last :: List.Tot.rev others) | a :: q -> bring_last_goal_on_top' (a :: others) q let bring_last_goal_on_top () = let g = goals () in bring_last_goal_on_top' [] g let rec extract_contexts (lemma_left lemma_right label_attr attr: term) (t: term) : Tac (option (unit -> Tac unit)) = let hd, tl = collect_app t in if is_star hd then match tl with | (t_left, Q_Explicit) :: (t_right, Q_Explicit) :: [] -> let extract_right () : Tac (option (unit -> Tac unit)) = match extract_contexts lemma_left lemma_right label_attr attr t_right with | None -> None | Some f -> Some (fun _ -> apply_lemma lemma_right; dismiss_all_but_last (); f () ) in begin match extract_contexts lemma_left lemma_right label_attr attr t_left with | None -> extract_right () | Some f -> Some (fun _ -> try apply_lemma lemma_left; dismiss_all_but_last (); f () with _ -> begin match extract_right () with | None -> fail "no context on the right either" | Some g -> g () end ) end | _ -> None else let candidates = let hd_fv = match inspect_unascribe hd with | Tv_FVar fv -> Some fv | Tv_UInst fv _ -> Some fv | _ -> None in match hd_fv with | None -> [] | Some hd_fv -> let hd_s' = implode_qn (inspect_fv hd_fv) in let hd_s = pack (Tv_Const (C_String hd_s')) in lookup_by_term_attr label_attr (mk_app attr [hd_s, Q_Explicit]) in if Nil? candidates then None else Some (fun _ -> first (List.Tot.map (fun candidate _ -> apply_lemma (pack (Tv_FVar candidate)) <: Tac unit) candidates); dismiss_non_squash_goals () ) let extract_cbs_contexts = extract_contexts (`can_be_split_congr_l) (`can_be_split_congr_r) (`solve_can_be_split_lookup) (`solve_can_be_split_for) let open_existentials () : Tac unit = let e = cur_env () in if Nil? 
(lookup_attr (`solve_can_be_split_lookup) e) then fail "Tactic disabled: no available lemmas in context"; norm [delta_attr [`%__reduce__]]; let t0 = cur_goal () in match collect_app t0 with | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] -> let hd, tl = collect_app t1 in if hd `is_fvar` (`%can_be_split) then match tl with | _ (* lhs *) :: (rhs, Q_Explicit) :: [] -> begin match extract_cbs_contexts rhs with | None -> fail "open_existentials: no context found" | Some f -> apply_lemma (`can_be_split_trans_rev); dismiss_all_but_last (); split (); focus f; bring_last_goal_on_top () // so that any preconditions for the selected lemma are scheduled for later end | _ -> fail "open_existentials: ill-formed can_be_split" else fail "open_existentials: not a can_be_split goal" | _ -> fail "open_existentials: not a squash goal" let try_open_existentials () : Tac bool = focus (fun _ -> try open_existentials (); true with _ -> false ) (* Solving the can_be_split* constraints, if they are ready to be scheduled A constraint is deemed ready to be scheduled if it contains only one vprop unification variable If so, constraints are stripped to their underlying definition based on vprop equivalence, introducing universally quantified variables when needed. 
Internal details of the encoding are removed through normalization, before calling the AC-unification tactic defined above *) /// Solves a `can_be_split` constraint let rec solve_can_be_split (args:list argv) : Tac bool = match args with | [(t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in try focus (fun _ -> apply_lemma (`equiv_can_be_split); dismiss_slprops(); // If we have exactly the same term on both side, // equiv_sl_implies would solve the goal immediately or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if rnbr = 0 then apply_lemma (`equiv_sym); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true with | _ -> let opened_some = try_open_existentials () in if opened_some then solve_can_be_split args // we only need args for their number of uvars, which has not changed else false ) else false | _ -> false // Ill-formed can_be_split, should not happen /// Solves a can_be_split_dep constraint let solve_can_be_split_dep (args:list argv) : Tac bool = match args with | [(p, _); (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> let p_bind = implies_intro () in apply_lemma (`equiv_can_be_split); dismiss_slprops (); or_else (fun _ -> let b = unify p (`true_p) in if not b then fail "could not unify SMT prop with True"; apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; 
`%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' true p (binding_to_term p_bind))); true ) else false | _ -> fail "ill-formed can_be_split_dep" /// Helper rewriting lemma val emp_unit_variant (p:vprop) : Lemma (ensures can_be_split p (p `star` emp)) /// Solves a can_be_split_forall constraint let solve_can_be_split_forall (args:list argv) : Tac bool = match args with | [_; (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> ignore (forall_intro()); apply_lemma (`equiv_can_be_split); dismiss_slprops(); or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "Ill-formed can_be_split_forall, should not happen" val solve_can_be_split_forall_dep_for : string -> Tot unit val solve_can_be_split_forall_dep_lookup : unit // FIXME: same as solve_can_be_split_for above let extract_cbs_forall_dep_contexts = extract_contexts (`can_be_split_forall_dep_congr_l) (`can_be_split_forall_dep_congr_r) (`solve_can_be_split_forall_dep_lookup) (`solve_can_be_split_forall_dep_for) let open_existentials_forall_dep () : Tac unit = let e = cur_env () in if Nil? 
(lookup_attr (`solve_can_be_split_forall_dep_lookup) e) then fail "Tactic disabled: no available lemmas in context"; norm [ delta_only [ `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%rm; ]; iota; delta_attr [`%__reduce__]; ]; let t0 = cur_goal () in match collect_app t0 with | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] -> let hd, tl = collect_app t1 in if hd `is_fvar` (`%can_be_split_forall_dep) then match tl with | _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: [] | (_, Q_Implicit) (* #a *) :: _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: [] -> begin match inspect_unascribe rhs with | Tv_Abs _ body -> begin match extract_cbs_forall_dep_contexts body with | None -> fail "open_existentials_forall_dep: no candidate" | Some f -> apply_lemma (`can_be_split_forall_dep_trans_rev); dismiss_all_but_last (); split (); focus f; bring_last_goal_on_top (); if Cons? (goals ()) then norm [] end | _ -> fail "open_existentials_forall_dep : not an abstraction" end | _ -> fail "open_existentials_forall_dep : wrong number of arguments to can_be_split_forall_dep" else fail "open_existentials_forall_dep : not a can_be_split_forall_dep goal" | _ -> fail "open_existentials_forall_dep : not a squash/auto_squash goal" let try_open_existentials_forall_dep () : Tac bool = focus (fun _ -> try open_existentials_forall_dep (); true with _ -> false ) /// Solves a can_be_split_forall_dep constraint let rec solve_can_be_split_forall_dep (args:list argv) : Tac bool = match args with | [_; (pr, _); (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in try focus (fun _ -> norm []; let x = forall_intro () in let pr = mk_app pr [(binding_to_term x, Q_Explicit)] in let p_bind = implies_intro () in apply_lemma (`equiv_can_be_split); or_else (fun _ -> flip()) (fun _ -> ()); let 
pr = norm_term [] pr in or_else (fun _ -> let b = unify pr (`true_p) in if not b then fail "could not unify SMT prop with True"; apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' true pr (binding_to_term p_bind))); true with | Postpone msg -> false | TacticFailure msg -> let opened = try_open_existentials_forall_dep () in if opened then solve_can_be_split_forall_dep args // we only need args for their number of uvars, which has not changed else fail msg | _ -> fail "Unexpected exception in framing tactic" ) else false | _ -> fail "Ill-formed can_be_split_forall_dep, should not happen" /// Solves an equiv_forall constraint let solve_equiv_forall (args:list argv) : Tac bool = match args with | [_; (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> apply_lemma (`equiv_forall_elim); match goals () with | [] -> () | _ -> dismiss_slprops (); ignore (forall_intro()); or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "Ill-formed equiv_forall, should not happen" /// Solves an equiv constraint let solve_equiv (args:list argv) : Tac bool = match args with | [(t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 
then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip ()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "Ill-formed equiv, should not happen" /// Solves a can_be_split_post constraint let solve_can_be_split_post (args:list argv) : Tac bool = match args with | [_; _; (t1, _); (t2, _)] -> let lnbr = slterm_nbr_uvars t1 in let rnbr = slterm_nbr_uvars t2 in if if lnbr + rnbr <= 1 then unfold_guard () else false then ( let open FStar.Algebra.CommMonoid.Equiv in focus (fun _ -> norm[]; let g = _cur_goal () in ignore (forall_intro()); apply_lemma (`equiv_forall_elim); match goals () with | [] -> () | _ -> dismiss_slprops (); ignore (forall_intro()); or_else (fun _ -> apply_lemma (`equiv_refl)) (fun _ -> if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym); or_else (fun _ -> flip()) (fun _ -> ()); norm [delta_only [ `%__proj__CM__item__unit; `%__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p))); true ) else false | _ -> fail "ill-formed can_be_split_post" /// Checks whether any of the two terms was introduced during a Steel monadic return let is_return_eq (l r:term) : Tac bool = let nl, al = collect_app l in let nr, ar = collect_app r in is_fvar nl (`%return_pre) || is_fvar nr (`%return_pre) /// Solves indirection equalities introduced by the layered effects framework. 
/// If these equalities were introduced during a monadic return, they need to be solved
/// at a later stage to avoid overly restricting contexts of unification variables
/// Each recursive call inspects the head goal only: return-related equalities are
/// postponed with later(), other equalities are discharged with trefl().
/// [fuel] bounds the number of goals examined.
let rec solve_indirection_eqs (fuel: nat) : Tac unit =
  if fuel = 0 then () else
  match goals () with
  | [] -> ()
  | hd::_ ->
    let f = term_as_formula' (goal_type hd) in
    match f with
    | Comp (Eq _) l r ->
      // Equalities coming from a monadic return (see is_return_eq) are delayed
      if is_return_eq l r then later() else trefl();
      solve_indirection_eqs (fuel - 1)
    | _ -> later(); solve_indirection_eqs (fuel - 1)

/// Solve all equalities in the list of goals by calling the F* unifier
/// Non-equality goals are postponed with later(); [fuel] bounds the traversal.
let rec solve_all_eqs (fuel: nat) : Tac unit =
  if fuel = 0 then () else
  match goals () with
  | [] -> ()
  | hd::_ ->
    let f = term_as_formula' (goal_type hd) in
    match f with
    | Comp (Eq _) l r ->
      trefl();
      solve_all_eqs (fuel - 1)
    | _ -> later(); solve_all_eqs (fuel - 1)

/// It is important to not normalize the return_pre eqs goals before unifying
/// See test7 in FramingTestSuite for a detailed explanation
/// Same traversal scheme as solve_all_eqs: equality goals are solved with trefl(),
/// everything else is postponed with later().
let rec solve_return_eqs (fuel: nat) : Tac unit =
  if fuel = 0 then () else
  match goals () with
  | [] -> ()
  | hd::_ ->
    let f = term_as_formula' (goal_type hd) in
    match f with
    | Comp (Eq _) l r ->
      trefl();
      solve_return_eqs (fuel - 1)
    | _ -> later(); solve_return_eqs (fuel - 1)

/// Strip annotations in a goal, to get to the underlying slprop equivalence
/// [loc] is only used to build error messages identifying the calling tactic.
/// The goal must be squash applied to one of the can_be_split* / equiv_forall
/// predicates; the matching elimination lemma is applied to expose the
/// underlying equivalence goal.
let goal_to_equiv (loc:string) : Tac unit =
  let t = cur_goal () in
  let f = term_as_formula' t in
  match f with
  | App hd0 t ->
    if not (is_fvar hd0 (`%squash)) then fail (loc ^ " unexpected non-squash goal in goal_to_equiv");
    let hd, args = collect_app t in
    if hd `is_fvar` (`%can_be_split) then (
      apply_lemma (`equiv_can_be_split)
    ) else if hd `is_fvar` (`%can_be_split_forall) then (
      ignore (forall_intro ());
      apply_lemma (`equiv_can_be_split)
    ) else if hd `is_fvar` (`%equiv_forall) then (
      apply_lemma (`equiv_forall_elim);
      ignore (forall_intro ())
    ) else if hd `is_fvar` (`%can_be_split_post) then (
      apply_lemma (`can_be_split_post_elim);
      dismiss_slprops();
      ignore
(forall_intro ());
      ignore (forall_intro ())
    ) else if hd `is_fvar` (`%can_be_split_dep) then (
      fail ("can_be_split_dep not supported in " ^ loc)
    ) else if hd `is_fvar` (`%can_be_split_forall_dep) then (
      fail ("can_be_split_forall_dep not supported in " ^ loc)
    ) else
      // This should never happen
      fail (loc ^ " goal in unexpected position")
  | _ -> fail (loc ^ " unexpected goal")

/// Returns every value in the association list [l] whose key is syntactically
/// equal (term_eq_old) to [key], preserving the order of [l].
let rec term_dict_assoc (#a: Type) (key: term) (l: list (term & a)) : Tac (list a) =
  match l with
  | [] -> []
  | (k, v) :: q ->
    let q' = term_dict_assoc key q in
    if k `term_eq_old` key then (v :: q') else q'

/// Returns true if the goal has been solved, false if it should be delayed
/// Squash goals are dispatched on their head symbol to the dedicated solve_*
/// tactics, then to user-supplied tactics found in [dict]; remaining logical
/// goals go to the SMT solver only when they contain no uvars. Equality goals
/// are unified only when one side is fully determined.
let solve_or_delay (dict: list (term & (unit -> Tac bool))) : Tac bool =
  // Beta-reduce the goal first if possible
  norm [];
  let f = term_as_formula' (cur_goal ()) in
  match f with
  | App hd0 t ->
    if is_fvar hd0 (`%squash) then
      let hd, args = collect_app t in
      if hd `is_fvar` (`%can_be_split) then solve_can_be_split args
      else if hd `is_fvar` (`%can_be_split_forall) then solve_can_be_split_forall args
      else if hd `is_fvar` (`%equiv_forall) then solve_equiv_forall args
      else if hd `is_fvar` (`%can_be_split_post) then solve_can_be_split_post args
      else if hd `is_fvar` (`%equiv) then solve_equiv args
      else if hd `is_fvar` (`%can_be_split_dep) then solve_can_be_split_dep args
      else if hd `is_fvar` (`%can_be_split_forall_dep) then solve_can_be_split_forall_dep args
      else
        let candidates = term_dict_assoc hd dict in
        let run_tac (tac: unit -> Tac bool) () : Tac bool = focus tac in
        begin try
          first (List.Tot.map run_tac candidates)
        with _ ->
          (* this is a logical goal, solve it only if it has no uvars *)
          if List.Tot.length (free_uvars t) = 0
          then (smt (); true)
          else false
        end
    else
      // TODO: handle non-squash goals here
      false
  | Comp (Eq _) l r ->
    let lnbr = List.Tot.length (free_uvars l) in
    let rnbr = List.Tot.length (free_uvars r) in
    // Only solve equality if one of the terms is completely determined
    if lnbr = 0 || rnbr = 0 then (trefl ();
true) else false
  | _ -> false

/// Returns true if it successfully solved a goal
/// If it returns false, it means it didn't find any solvable goal,
/// which should mean only delayed goals are left
/// NOTE(review): the comment above appears to describe the scheduling
/// functions below (pick_next); vprop_term_uvars itself collects the uvar
/// indices occurring in a vprop term — confirm and move if so.
let rec vprop_term_uvars (t:term) : Tac (list int) =
  match inspect_unascribe t with
  | Tv_Uvar i' _ -> [i']
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    if is_star_or_unit hd then
      // Only count the number of unresolved slprops, not program implicits
      argv_uvars args
    else vprop_term_uvars hd
  | Tv_Abs _ t -> vprop_term_uvars t
  | _ -> []

/// Concatenates the uvar indices of every argument in [args], left to right.
and argv_uvars (args: list argv) : Tac (list int) =
  // Build a delayed computation so the fold stays inside the Tac effect
  let t : unit -> Tac (list int) =
    fold_left (fun (n: unit -> Tac (list int)) (x, _) ->
      let t () : Tac (list int) =
        let l1 = n () in
        let l2 = vprop_term_uvars x in
        l1 `List.Tot.append` l2
      in
      t)
      (fun _ -> []) args
  in
  t ()

/// Removes consecutive duplicates; on a sorted list this removes all duplicates.
let rec remove_dups_from_sorted (#t: eqtype) (l: list t) : Tot (list t) =
  match l with
  | [] | [_] -> l
  | a1 :: a2 :: q ->
    if a1 = a2 then remove_dups_from_sorted (a2 :: q)
    else a1 :: remove_dups_from_sorted (a2 :: q)

/// Sorts [l] in increasing order and removes duplicates.
let simplify_list (l: list int) : Tot (list int) =
  remove_dups_from_sorted (List.Tot.sortWith (List.Tot.compare_of_bool (<)) l)

/// Extracts the sorted, duplicate-free list of vprop uvar indices from a
/// squash goal; returns [] for terms that are not squash applications.
let goal_term_uvars (t: term) : Tac (list int) =
  let hd, tl = collect_app t in
  if hd `is_fvar` (`%squash) then
    match tl with
    | [tl0, Q_Explicit] ->
      let _, tl1 = collect_app tl0 in
      simplify_list (argv_uvars tl1)
    | _ -> dump "ill-formed squash"; []
  else []

/// Merges two sorted integer lists into a sorted list, keeping a single copy
/// of elements present in both inputs.
let rec merge_sorted (l1 l2: list int) : Tot (list int)
  (decreases (List.Tot.length l1 + List.Tot.length l2)) =
  match l1 with
  | [] -> l2
  | a1 :: q1 ->
    begin match l2 with
    | [] -> l1
    | a2 :: q2 ->
      if a1 < a2 then a1 :: merge_sorted q1 l2
      else if a2 < a1 then a2 :: merge_sorted l1 q2
      else a1 :: merge_sorted q1 q2
    end

/// Decides whether two sorted integer lists share at least one element.
let rec sorted_lists_intersect (l1 l2: list int) : Tot bool
  (decreases (List.Tot.length l1 + List.Tot.length l2)) =
  match l1 with
  | [] -> false
  | a1 :: q1 ->
    begin match l2 with
    | [] -> false
    | a2 :: q2 ->
      if a1 = a2 then true
      else if a1 < a2 then sorted_lists_intersect q1
l2 else sorted_lists_intersect l1 q2
    end

/// TODO: cache the list of variables for each goal, to avoid computing them several times
/// Compute the list of all vprop uvars that appear in the same goal as unsolved guard_vprop
/// [accu] is the sorted accumulator of uvar indices found so far.
let rec compute_guarded_uvars1 (accu: list int) (g: list goal) : Tac (list int) =
  match g with
  | [] -> accu
  | a :: q ->
    let t = goal_type a in
    let accu' =
      // Only goals whose guards are still unsolved contribute their uvars
      if all_guards_solved t then accu
      else merge_sorted accu (goal_term_uvars t)
    in
    compute_guarded_uvars1 accu' q

/// Enrich the list of vprop uvars with those that appear in the same goal
/// (one propagation pass: a goal sharing a uvar with [accu] adds all its uvars)
let rec compute_guarded_uvars2 (accu: list int) (g: list goal) : Tac (list int) =
  match g with
  | [] -> accu
  | a :: q ->
    let t = goal_type a in
    let l = goal_term_uvars t in
    let accu' =
      if sorted_lists_intersect accu l then merge_sorted accu l
      else accu
    in
    compute_guarded_uvars2 accu' q

/// Iterate compute_guarded_uvars2 until the accumulator reaches a fixed point.
let rec compute_guarded_uvars3 (accu: list int) (g: list goal) : Tac (list int) =
  let accu' = compute_guarded_uvars2 accu g in
  if accu = accu' then accu
  else compute_guarded_uvars3 accu' g

/// Entry point: the transitive closure of uvars connected to unsolved guards.
let compute_guarded_uvars () : Tac (list int) =
  let g = goals () in
  let accu = compute_guarded_uvars1 [] g in
  compute_guarded_uvars3 accu g

/// Tries to solve (or delay) one goal, skipping goals that mention a guarded
/// uvar; returns false when [fuel] runs out without solving any goal.
let rec pick_next (guarded_uvars: list int) (dict: _) (fuel: nat) : Tac bool =
  if fuel = 0 then false else
  match goals () with
  | [] -> true
  | a::_ ->
    let t = goal_type a in
    let l = goal_term_uvars t in
    let next () : Tac bool = later (); pick_next guarded_uvars dict (fuel - 1) in
    if sorted_lists_intersect guarded_uvars l then next ()
    else if solve_or_delay dict then true
    else next ()

/// Main loop to schedule solving of goals.
/// The goals () function fetches all current goals in the context
let rec resolve_tac (dict: _) : Tac unit =
  match goals () with
  | [] -> ()
  | g ->
    norm [];
    let guarded_uvars = compute_guarded_uvars () in
    // TODO: If it picks a goal it cannot solve yet, try all the other ones?
if pick_next guarded_uvars dict (List.Tot.length g) then resolve_tac dict
    else fail "Could not make progress, no solvable goal found"

/// One scheduling step for logical goals: solve-or-delay the current goal and
/// move to the next one on failure. [fuel] bounds the traversal; returns true
/// as soon as one goal was solved (or none are left).
let rec pick_next_logical (dict: _) (fuel: nat) : Tac bool =
  if fuel = 0 then false else
  match goals () with
  | [] -> true
  | _::_ -> if solve_or_delay dict then true else (later (); pick_next_logical dict (fuel - 1))

/// Special case for logical requires/ensures goals, which correspond only to equalities
let rec resolve_tac_logical (dict: _) : Tac unit =
  match goals () with
  | [] -> ()
  | g ->
    let fuel = List.Tot.length g in
    if pick_next_logical dict fuel then resolve_tac_logical dict
    else
      // This is only for requires/ensures constraints, which are equalities
      // There should always be a scheduling of constraints, but it can happen
      // that some uvar for the type of an equality is not resolved.
      // If we reach this point, we try to simply call the unifier instead of failing directly
      solve_all_eqs fuel

/// Determining whether the type represented by term [t] corresponds to one of the logical (requires/ensures) goals
let typ_contains_req_ens (t:term) : Tac bool =
  let name, _ = collect_app t in
  is_any_fvar name [`%req_t; `%ens_t; `%pure_wp; `%pure_pre; `%pure_post]

/// Splits goals between separation logic goals (slgoals) and requires/ensures goals (loggoals)
/// An equality goal whose type mentions req/ens is logical; the remaining
/// equalities and the squash goals are separation-logic goals; every other
/// goal is dropped from both lists.
let rec filter_goals (l:list goal) : Tac (list goal * list goal) =
  match l with
  | [] -> [], []
  | hd::tl ->
    let slgoals, loggoals = filter_goals tl in
    match term_as_formula' (goal_type hd) with
    | Comp (Eq t) _ _ ->
      if Some? t then
        let b = typ_contains_req_ens (Some?.v t) in
        if b then (
          slgoals, hd::loggoals
        ) else (
          hd::slgoals, loggoals
        )
      else (
        hd::slgoals, loggoals
      )
    | App t _ ->
      if is_fvar t (`%squash) then
        hd::slgoals, loggoals
      else
        slgoals, loggoals
    | _ -> slgoals, loggoals

/// Solves the goal [t] when it is the trivial proposition True_;
/// raises Goal_not_trivial otherwise.
let is_true (t:term) () : Tac unit =
  match term_as_formula t with
  | True_ -> exact (`())
  | _ -> raise Goal_not_trivial

/// Solve the maybe_emp goals:
/// Normalize to unfold maybe_emp(_dep) and then reduce the if/then/else, and
/// solve the goal (either an equality through trefl, or True through trivial)
let rec solve_maybe_emps (fuel: nat) : Tac unit =
  if fuel = 0 then () else
  match goals () with
  | [] -> ()
  | _::_ ->
    let f = term_as_formula' (cur_goal ()) in (
    match f with
    | App hd0 t ->
      // Non-squash goals are postponed
      if not (is_fvar hd0 (`%squash)) then later ()
      else
        let hd, args = collect_app t in
        if hd `is_fvar` (`%maybe_emp) then
          (norm [delta_only [`%maybe_emp]; iota; zeta; primops; simplify];
           let g = cur_goal () in
           or_else (is_true g) trefl)
        else if hd `is_fvar` (`%maybe_emp_dep) then
          (norm [delta_only [`%maybe_emp_dep]; iota; zeta; primops; simplify];
           let g = cur_goal () in
           or_else (is_true g) (fun _ -> ignore (forall_intro ()); trefl ()))
        else later()
    | _ -> later()
    );
    solve_maybe_emps (fuel - 1)

/// Normalizes all the return_pre annotations once they are not needed anymore
let rec norm_return_pre (fuel: nat) : Tac unit =
  if fuel = 0 then () else
  match goals () with
  | [] -> ()
  | _::_ -> norm [delta_only [`%return_pre]]; later(); norm_return_pre (fuel - 1)

/// Pretty-prints the type of goal [g] (debugging helper).
let print_goal (g:goal) : Tac string =
  let t = goal_type g in
  term_to_string t

/// Pretty-prints a list of goals, one per line (debugging helper).
let print_goals (g:list goal) : Tac string =
  let strs = map print_goal g in
  String.concat "\n" strs

/// The entry point of the frame inference tactic:
/// The resolve_implicits; framing_implicit annotation indicates that this tactic should
/// be called by the F* typechecker to solve all implicits annotated with the `framing_implicit` attribute.
/// The `plugin` attribute ensures that this tactic is compiled, and executed natively for performance reasons let init_resolve_tac' (dict: _) : Tac unit = // We split goals between framing goals, about slprops (slgs) // and goals related to requires/ensures, that depend on slprops (loggs) let slgs, loggs = filter_goals (goals()) in // print ("SL Goals: \n" ^ print_goals slgs); // print ("Logical goals: \n" ^ print_goals loggs); // We first solve the slprops set_goals slgs; // We solve all the maybe_emp goals first: All "extra" frames are directly set to emp solve_maybe_emps (List.Tot.length (goals ())); // We first solve all indirection equalities that will not lead to imprecise unification // i.e. we can solve all equalities inserted by layered effects, except the ones corresponding // to the preconditions of a pure return solve_indirection_eqs (List.Tot.length (goals())); // To debug, it is best to look at the goals at this stage. Uncomment the next line // dump "initial goals"; // We can now solve the equalities for returns solve_return_eqs (List.Tot.length (goals())); // It is important to not normalize the return_pre equalities before solving them // Else, we lose some variables dependencies, leading to the tactic being stuck // See test7 in FramingTestSuite for more explanations of what is failing // Once unification has been done, we can then safely normalize and remove all return_pre norm_return_pre (List.Tot.length (goals())); // Finally running the core of the tactic, scheduling and solving goals resolve_tac dict; // We now solve the requires/ensures goals, which are all equalities // All slprops are resolved by now set_goals loggs; resolve_tac_logical dict [@@ resolve_implicits; framing_implicit; plugin] let init_resolve_tac () : Tac unit = init_resolve_tac' [] (* AF: There probably is a simpler way to get from p to squash p in a tactic, so that we can use apply_lemma *) let squash_and p (x:squash (p /\ True)) : (p /\ True) = let x : squash (p 
`Prims.pair` True) = FStar.Squash.join_squash x in x /// Calling into the framing tactic to ensure that the vprop whose selector we are trying to access is in the context [@@plugin] let selector_tactic () : Tac unit = apply (`squash_and); apply_lemma (`intro_can_be_split_frame); flip (); norm [delta_only [ `%CE.__proj__CM__item__unit; `%CE.__proj__CM__item__mult; `%rm; `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2; `%fst; `%snd]; delta_attr [`%__reduce__]; primops; iota; zeta]; canon' false (`true_p) (`true_p) /// Specific tactic used during the SteelAtomicBase and SteelBase effect definitions: /// This allows us to write more complex if_then_else combinators, while proving them /// sound with respect to subcomp
false
false
Steel.Effect.Common.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val ite_soundness_tac: Prims.unit -> Tac unit
[]
Steel.Effect.Common.ite_soundness_tac
{ "file_name": "lib/steel/Steel.Effect.Common.fsti", "git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
_: Prims.unit -> FStar.Tactics.Effect.Tac Prims.unit
{ "end_col": 8, "end_line": 3168, "start_col": 37, "start_line": 3143 }
FStar.Tactics.Effect.Tac
val solve_maybe_emps (fuel: nat) : Tac unit
[ { "abbrev": false, "full_module": "FStar.Reflection.V2.Derived.Lemmas", "short_module": null }, { "abbrev": true, "full_module": "FStar.Algebra.CommMonoid.Equiv", "short_module": "CE" }, { "abbrev": false, "full_module": "FStar.Tactics.CanonCommMonoidSimple.Equiv", "short_module": null }, { "abbrev": false, "full_module": "FStar.Tactics.V2", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "FStar.Ghost", "short_module": null }, { "abbrev": true, "full_module": "FStar.FunctionalExtensionality", "short_module": "FExt" }, { "abbrev": true, "full_module": "Steel.Memory", "short_module": "Mem" }, { "abbrev": false, "full_module": "Steel.Memory", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "Steel.Effect", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let rec solve_maybe_emps (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | _::_ -> let f = term_as_formula' (cur_goal ()) in ( match f with | App hd0 t -> if not (is_fvar hd0 (`%squash)) then later () else let hd, args = collect_app t in if hd `is_fvar` (`%maybe_emp) then (norm [delta_only [`%maybe_emp]; iota; zeta; primops; simplify]; let g = cur_goal () in or_else (is_true g) trefl) else if hd `is_fvar` (`%maybe_emp_dep) then (norm [delta_only [`%maybe_emp_dep]; iota; zeta; primops; simplify]; let g = cur_goal () in or_else (is_true g) (fun _ -> ignore (forall_intro ()); trefl ())) else later() | _ -> later() ); solve_maybe_emps (fuel - 1)
val solve_maybe_emps (fuel: nat) : Tac unit let rec solve_maybe_emps (fuel: nat) : Tac unit =
true
null
false
if fuel = 0 then () else match goals () with | [] -> () | _ :: _ -> let f = term_as_formula' (cur_goal ()) in (match f with | App hd0 t -> if not (is_fvar hd0 (`%squash)) then later () else let hd, args = collect_app t in if hd `is_fvar` (`%maybe_emp) then (norm [delta_only [`%maybe_emp]; iota; zeta; primops; simplify]; let g = cur_goal () in or_else (is_true g) trefl) else if hd `is_fvar` (`%maybe_emp_dep) then (norm [delta_only [`%maybe_emp_dep]; iota; zeta; primops; simplify]; let g = cur_goal () in or_else (is_true g) (fun _ -> ignore (forall_intro ()); trefl ())) else later () | _ -> later ()); solve_maybe_emps (fuel - 1)
{ "checked_file": "Steel.Effect.Common.fsti.checked", "dependencies": [ "Steel.Memory.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Tactics.CanonCommMonoidSimple.Equiv.fst.checked", "FStar.String.fsti.checked", "FStar.Squash.fsti.checked", "FStar.Set.fsti.checked", "FStar.Reflection.V2.Derived.Lemmas.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.Base.fst.checked", "FStar.List.Tot.fst.checked", "FStar.Ghost.fsti.checked", "FStar.FunctionalExtensionality.fsti.checked", "FStar.Classical.fsti.checked", "FStar.Algebra.CommMonoid.Equiv.fst.checked" ], "interface_file": false, "source_file": "Steel.Effect.Common.fsti" }
[]
[ "Prims.nat", "Prims.op_Equality", "Prims.int", "Prims.unit", "Prims.bool", "FStar.Tactics.Types.goal", "Prims.list", "Steel.Effect.Common.solve_maybe_emps", "Prims.op_Subtraction", "FStar.Tactics.NamedView.term", "Prims.op_Negation", "FStar.Reflection.V2.Derived.is_fvar", "FStar.Tactics.V2.Derived.later", "FStar.Reflection.V2.Data.argv", "FStar.Tactics.V2.Derived.or_else", "Steel.Effect.Common.is_true", "FStar.Tactics.V2.Derived.trefl", "FStar.Reflection.Types.typ", "FStar.Tactics.V2.Derived.cur_goal", "FStar.Tactics.V2.Builtins.norm", "Prims.Cons", "FStar.Pervasives.norm_step", "FStar.Pervasives.delta_only", "Prims.string", "Prims.Nil", "FStar.Pervasives.iota", "FStar.Pervasives.zeta", "FStar.Pervasives.primops", "FStar.Pervasives.simplify", "FStar.Pervasives.ignore", "FStar.Tactics.NamedView.binding", "FStar.Tactics.V2.Logic.forall_intro", "FStar.Pervasives.Native.tuple2", "FStar.Tactics.V2.SyntaxHelpers.collect_app", "FStar.Reflection.V2.Formula.formula", "FStar.Reflection.V2.Formula.term_as_formula'", "FStar.Tactics.V2.Derived.goals" ]
[]
(* Copyright 2020 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Steel.Effect.Common open Steel.Memory module Mem = Steel.Memory module FExt = FStar.FunctionalExtensionality open FStar.Ghost /// This module provides various predicates and functions which are common to the /// different Steel effects. /// It also contains the tactic responsible for frame inference through a variant of AC-unification #set-options "--ide_id_info_off" (* Normalization helpers *) irreducible let framing_implicit : unit = () irreducible let __steel_reduce__ : unit = () /// An internal attribute for finer-grained normalization in framing equalities irreducible let __inner_steel_reduce__ : unit = () irreducible let __reduce__ : unit = () irreducible let smt_fallback : unit = () irreducible let ite_attr : unit = () // Needed to avoid some logical vs prop issues during unification with no subtyping [@@__steel_reduce__] unfold let true_p : prop = True module T = FStar.Tactics.V2 let join_preserves_interp (hp:slprop) (m0:hmem hp) (m1:mem{disjoint m0 m1}) : Lemma (interp hp (join m0 m1)) [SMTPat (interp hp (join m0 m1))] = let open Steel.Memory in intro_emp m1; intro_star hp emp m0 m1; affine_star hp emp (join m0 m1) (* Definition of a selector for a given slprop *) /// A selector of type `a` for a separation logic predicate hp is a function /// from a memory where the predicate hp holds, which returns a value of type `a`. 
/// The effect GTot indicates that selectors are ghost functions, used for specification /// and proof purposes only let selector' (a:Type0) (hp:slprop) = hmem hp -> GTot a /// Self-framing property for selectors let sel_depends_only_on (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp) (m1:mem{disjoint m0 m1}). (interp_depends_only_on hp; ( sel m0 == sel (join m0 m1))) /// Additional property that selectors must satisfy, related to internals of /// the Steel memory model encoding let sel_depends_only_on_core (#a:Type) (#hp:slprop) (sel:selector' a hp) = forall (m0:hmem hp). sel m0 == sel (core_mem m0) /// Full definition of a selector, as a function which satisfies the two predicates above let selector (a:Type) (hp:slprop) : Type = sel:selector' a hp{sel_depends_only_on sel /\ sel_depends_only_on_core sel} /// The basis of our selector framework: Separation logic assertions enhanced with selectors /// Note that selectors are "optional", it is always possible to use a non-informative selector, /// such as fun _ -> () and to rely on the standard separation logic reasoning [@@ erasable] noeq type vprop' = { hp: slprop u#1; t:Type0; sel: selector t hp} (* Lifting the star operator to an inductive type makes normalization and implementing some later functions easier *) [@@__steel_reduce__; erasable] noeq type vprop = | VUnit : vprop' -> vprop | VStar: vprop -> vprop -> vprop (* A generic lift from slprop to vprop with a non-informative selector *) [@@ __steel_reduce__] let to_vprop' (p:slprop) = {hp = p; t = unit; sel = fun _ -> ()} [@@ __steel_reduce__] unfold let to_vprop (p:slprop) = VUnit (to_vprop' p) /// Normalization steps for norm below. 
/// All functions marked as `unfold`, or with the `__steel_reduce__` attribute will be reduced, /// as well as some functions internal to the selector framework unfold let normal_steps = [delta_attr [`%__steel_reduce__; `%__inner_steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify] /// The core normalization primitive used to simplify Verification Conditions before encoding /// them to an SMT solver. unfold let normal (#a:Type) (x:a) = norm normal_steps x /// An abbreviation for the VStar constructor, allowing to use it with infix notation [@@ __steel_reduce__; __reduce__] let star = VStar /// Extracting the underlying separation logic assertion from a vprop [@@ __steel_reduce__] let rec hp_of (p:vprop) = match p with | VUnit p -> p.hp | VStar p1 p2 -> hp_of p1 `Mem.star` hp_of p2 /// Extracting the selector type from a vprop [@@ __steel_reduce__] let rec t_of (p:vprop) = match p with | VUnit p -> p.t | VStar p1 p2 -> t_of p1 * t_of p2 /// Extracting the selector from a vprop [@@ __steel_reduce__] let rec sel_of (p:vprop) : GTot (selector (t_of p) (hp_of p)) = match p with | VUnit p -> fun h -> p.sel h | VStar p1 p2 -> let sel1 = sel_of p1 in let sel2 = sel_of p2 in fun h -> (sel1 h, sel2 h) /// Type abbreviations for separation logic pre- and postconditions of the Steel effects type pre_t = vprop type post_t (a:Type) = a -> vprop /// An annotation to indicate which separation logic predicates correspond to monadic computations /// These computations are handled in a specific manner in the framing tactic; they correspond to places where /// the context shrinks from all local variables in the computation to variables available at the toplevel let return_pre (p:vprop) : vprop = p noextract let hmem (p:vprop) = hmem (hp_of p) /// Abstract predicate for vprop implication. 
Currently implemented as an implication on the underlying slprop val can_be_split (p q:pre_t) : Type0 /// Exposing the implementation of `can_be_split` when needed for proof purposes val reveal_can_be_split (_:unit) : Lemma (forall p q. can_be_split p q == Mem.slimp (hp_of p) (hp_of q)) /// A targeted version of the above val can_be_split_interp (r r':vprop) (h:hmem r) : Lemma (requires can_be_split r r') (ensures interp (hp_of r') h) /// A dependent version of can_be_split, to be applied to dependent postconditions let can_be_split_forall (#a:Type) (p q:post_t a) = forall x. can_be_split (p x) (q x) /// A version of can_be_split which is indexed by a proposition, which can be used for equalities abduction let can_be_split_dep (p:prop) (t1 t2:pre_t) = p ==> can_be_split t1 t2 /// A dependent version of the above predicate let can_be_split_forall_dep (#a:Type) (p:a -> prop) (t1 t2:post_t a) = forall (x:a). p x ==> can_be_split (t1 x) (t2 x) (* Some lemmas about the can_be_split* predicates, to be used as rewriting rules for the abstract predicates *) val can_be_split_trans (p q r:vprop) : Lemma (requires p `can_be_split` q /\ q `can_be_split` r) (ensures p `can_be_split` r) let can_be_split_trans_rev (p q r:vprop) : Lemma (requires q `can_be_split` r /\ p `can_be_split` q) (ensures p `can_be_split` r) = can_be_split_trans p q r val can_be_split_star_l (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` p) [SMTPat ((p `star` q) `can_be_split` p)] val can_be_split_star_r (p q:vprop) : Lemma (ensures (p `star` q) `can_be_split` q) [SMTPat ((p `star` q) `can_be_split` q)] val can_be_split_refl (p:vprop) : Lemma (p `can_be_split` p) [SMTPat (p `can_be_split` p)] val can_be_split_congr_l (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((p `star` r) `can_be_split` (q `star` r))) val can_be_split_congr_r (p q r: vprop) : Lemma (requires (p `can_be_split` q)) (ensures ((r `star` p) `can_be_split` (r `star` q))) let prop_and (p1 p2: prop) : Tot prop = p1 
/\ p2 let can_be_split_forall_dep_trans_rev (#a: Type) (cond1 cond2: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond2 q r /\ can_be_split_forall_dep cond1 p q)) (ensures (can_be_split_forall_dep (fun x -> cond1 x `prop_and` cond2 x) p r)) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_trans x y) z) let can_be_split_forall_dep_congr_l (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> p x `star` r x) (fun x -> q x `star` r x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_l x y) z) let can_be_split_forall_dep_congr_r (#a: Type) (cond: a -> prop) (p q r: post_t a) : Lemma (requires (can_be_split_forall_dep cond p q)) (ensures (can_be_split_forall_dep cond (fun x -> r x `star` p x) (fun x -> r x `star` q x))) = Classical.forall_intro_3 (fun x y z -> Classical.move_requires (can_be_split_congr_r x y) z) /// To simplify the implementation of the framing tactic, dependent equivalence /// is defined as a double dependent implication let equiv_forall (#a:Type) (t1 t2:post_t a) : Type0 = t1 `can_be_split_forall` t2 /\ t2 `can_be_split_forall` t1 /// This equivalence models a context restriction at the end of a Steel computation; /// note that t2 does not depend on the value of type `a`, but the two vprops must be /// equivalent let can_be_split_post (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) = forall (x:a). equiv_forall (t1 x) t2 /// Lifting the equivalence relation to vprops. Two vprops are equivalent if the underlying slprops /// are equivalent val equiv (p q:vprop) : prop /// Revealing the definition of vprop equivalence when needed for proof purposes. 
/// In other cases, the predicate is abstract val reveal_equiv (p q:vprop) : Lemma (p `equiv` q <==> hp_of p `Mem.equiv` hp_of q) (* A restricted view of the heap, that only allows to access selectors of the current slprop *) let rmem' (pre:vprop) = FExt.restricted_g_t (r0:vprop{can_be_split pre r0}) (fun r0 -> normal (t_of r0)) /// Ensuring that rmems encapsulate the structure induced by the separation logic star val valid_rmem (#frame:vprop) (h:rmem' frame) : prop unfold let rmem (pre:vprop) = h:rmem' pre{valid_rmem h} /// Exposing the definition of mk_rmem to better normalize Steel VCs unfold noextract let unrestricted_mk_rmem (r:vprop) (h:hmem r) = fun (r0:vprop{r `can_be_split` r0}) -> can_be_split_interp r r0 h; sel_of r0 h [@@ __inner_steel_reduce__] noextract let mk_rmem' (r:vprop) (h:hmem r) : Tot (rmem' r) = FExt.on_dom_g (r0:vprop{r `can_be_split` r0}) (unrestricted_mk_rmem r h) val lemma_valid_mk_rmem (r:vprop) (h:hmem r) : Lemma (valid_rmem (mk_rmem' r h)) [@@ __inner_steel_reduce__] noextract let mk_rmem (r:vprop) (h:hmem r) : Tot (rmem r) = lemma_valid_mk_rmem r h; mk_rmem' r h val reveal_mk_rmem (r:vprop) (h:hmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (ensures reveal_can_be_split(); (mk_rmem r h) r0 == sel_of r0 h) (* Logical pre and postconditions can only access the restricted view of the heap *) type req_t (pre:pre_t) = rmem pre -> Type0 type ens_t (pre:pre_t) (a:Type) (post:post_t a) = rmem pre -> (x:a) -> rmem (post x) -> Type0 (* Empty assertion *) val emp : vprop /// When needed for proof purposes, the empty assertion is a direct lift of the /// empty assertion from Steel.Memory val reveal_emp (_:unit) : Lemma (hp_of emp == Mem.emp /\ t_of emp == unit) /// Lifting pure predicates to vprop [@@__steel_reduce__] unfold let pure (p:prop) = to_vprop (pure p) /// Framing predicates for the Steel effect. 
If the current computation has already /// been framed, then the additional frame is the empty predicate let maybe_emp (framed:bool) (frame:pre_t) = if framed then frame == emp else True /// Dependent version of the above predicate, usable in dependent postconditions let maybe_emp_dep (#a:Type) (framed:bool) (frame:post_t a) = if framed then (forall x. frame x == emp) else True (* focus_rmem is an additional restriction of our view of memory. We expose it here to be able to reduce through normalization; Any valid application of focus_rmem h will be reduced to the application of h *) [@@ __steel_reduce__] unfold let unrestricted_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) = fun (r':vprop{can_be_split r0 r'}) -> can_be_split_trans r r0 r'; h r' [@@ __inner_steel_reduce__] let focus_rmem' (#r: vprop) (h: rmem r) (r0: vprop{r `can_be_split` r0}) : Tot (rmem' r0) = FExt.on_dom_g (r':vprop{can_be_split r0 r'}) (unrestricted_focus_rmem h r0) val lemma_valid_focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Lemma (valid_rmem (focus_rmem' h r0)) [@@ __inner_steel_reduce__] let focus_rmem (#r:vprop) (h:rmem r) (r0:vprop{r `can_be_split` r0}) : Tot (rmem r0) = lemma_valid_focus_rmem h r0; focus_rmem' h r0 /// Exposing that calling focus_rmem on the current context corresponds to an equality let focus_rmem_refl (r:vprop) (h:rmem r) : Lemma (focus_rmem #r h r == h) = FStar.FunctionalExtensionality.extensionality_g _ _ (focus_rmem #r h r) h open FStar.Tactics.V2 /// State that all "atomic" subresources have the same selectors on both views. 
/// The predicate has the __steel_reduce__ attribute, ensuring that VC normalization /// will reduce it to a conjunction of equalities on atomic subresources /// This predicate is also marked as `strict_on_arguments` on [frame], ensuring that /// it will not be reduced when the frame is symbolic /// Instead, the predicate will be rewritten to an equality using `lemma_frame_equalities` below [@@ __steel_reduce__; strict_on_arguments [0]] let rec frame_equalities' (frame:vprop) (h0:rmem frame) (h1:rmem frame) : Type0 = begin match frame with | VUnit p -> h0 frame == h1 frame | VStar p1 p2 -> can_be_split_star_l p1 p2; can_be_split_star_r p1 p2; let h01 = focus_rmem h0 p1 in let h11 = focus_rmem h1 p1 in let h02 = focus_rmem h0 p2 in let h12 = focus_rmem h1 p2 in frame_equalities' p1 h01 h11 /\ frame_equalities' p2 h02 h12 end /// This lemma states that frame_equalities is the same as an equality on the top-level frame. /// The uncommon formulation with an extra [p] is needed to use in `rewrite_with_tactic`, /// where the goal is of the shape `frame_equalities frame h0 h1 == ?u` /// The rewriting happens below, in `frame_vc_norm` val lemma_frame_equalities (frame:vprop) (h0:rmem frame) (h1:rmem frame) (p:Type0) : Lemma (requires (h0 frame == h1 frame) == p) (ensures frame_equalities' frame h0 h1 == p) /// A special case for frames about emp. val lemma_frame_emp (h0:rmem emp) (h1:rmem emp) (p:Type0) : Lemma (requires True == p) (ensures frame_equalities' emp h0 h1 == p) /// A variant of conjunction elimination, suitable to the equality goals during rewriting val elim_conjunction (p1 p1' p2 p2':Type0) : Lemma (requires p1 == p1' /\ p2 == p2') (ensures (p1 /\ p2) == (p1' /\ p2')) /// Normalization and rewriting step for generating frame equalities. /// The frame_equalities function has the strict_on_arguments attribute on the [frame], /// ensuring that it is not reduced when the frame is symbolic. 
/// When that happens, we want to replace frame_equalities by an equality on the frame, /// mimicking reduction [@@plugin] let frame_vc_norm () : Tac unit = with_compat_pre_core 0 (fun _ -> // Do not normalize mk_rmem/focus_rmem to simplify application of // the reflexivity lemma on frame_equalities' norm [delta_attr [`%__steel_reduce__]; delta_only [`%Mkvprop'?.t; `%Mkvprop'?.hp; `%Mkvprop'?.sel; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult; `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit]; delta_qualifier ["unfold"]; iota;zeta;primops; simplify]; // After reduction, the term to rewrite might be of the shape // (frame_equalities' ... /\ frame_equalities' .. /\ ...) == ?u, // with some frame_equalities' possibly already fully reduced // We repeatedly split the clause and extract the term on the left // to generate equalities on atomic subresources ignore (repeat (fun _ -> // Try to split the conjunction. If there is no conjunction, we exit the repeat apply_lemma (`elim_conjunction); // Dismiss the two uvars created for the RHS, they'll be solved by unification dismiss (); dismiss (); // The first goal is the left conjunction split (); // Removes the frame equality if it is about emp or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ()); // Rewrites the frame_equalities if it wasn't yet reduced or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ()); norm normal_steps; // Finally solve the uvar, finishing the rewriting for this clause trefl () )); // Removes the frame equality if it is about emp or_else (fun _ -> apply_lemma (`lemma_frame_emp); dismiss()) (fun _ -> ()); // We do not have conjunctions anymore, we try to apply the frame_equalities rewriting // If it fails, the frame was not symbolic, so there is nothing to do or_else (fun _ -> apply_lemma (`lemma_frame_equalities); dismiss ()) (fun _ -> ()); norm normal_steps; trefl ()) [@@ __steel_reduce__] unfold let frame_equalities (frame:vprop) 
(h0:rmem frame) (h1:rmem frame) : prop = rewrite_with_tactic frame_vc_norm (frame_equalities' frame h0 h1) /// More lemmas about the abstract can_be_split predicates, to be used as /// rewriting rules in the tactic below val can_be_split_dep_refl (p:vprop) : Lemma (can_be_split_dep true_p p p) val equiv_can_be_split (p1 p2:vprop) : Lemma (requires p1 `equiv` p2) (ensures p1 `can_be_split` p2) val intro_can_be_split_frame (p q:vprop) (frame:vprop) : Lemma (requires q `equiv` (p `star` frame)) (ensures can_be_split q p /\ True) val can_be_split_post_elim (#a #b:Type) (t1:a -> post_t b) (t2:post_t b) : Lemma (requires (forall (x:a) (y:b). t1 x y `equiv` t2 y)) (ensures t1 `can_be_split_post` t2) val equiv_forall_refl (#a:Type) (t:post_t a) : Lemma (t `equiv_forall` t) val equiv_forall_elim (#a:Type) (t1 t2:post_t a) : Lemma (requires (forall (x:a). t1 x `equiv` t2 x)) (ensures t1 `equiv_forall` t2) open FStar.Tactics.CanonCommMonoidSimple.Equiv (* equiv is an equivalence relation on vprops *) /// Lemmas establishing the equivalence properties on equiv val equiv_refl (x:vprop) : Lemma (equiv x x) val equiv_sym (x y:vprop) : Lemma (requires equiv x y) (ensures equiv y x) val equiv_trans (x y z:vprop) : Lemma (requires equiv x y /\ equiv y z) (ensures equiv x z) module CE = FStar.Algebra.CommMonoid.Equiv /// Equiv is an equivalence relation for vprops elements inline_for_extraction noextract let req : CE.equiv vprop = CE.EQ equiv equiv_refl equiv_sym equiv_trans (* Star induces a commutative monoid for the equiv equivalence relation *) /// Lemmas establishing the commutative monoid properties val cm_identity (x:vprop) : Lemma ((emp `star` x) `equiv` x) val star_commutative (p1 p2:vprop) : Lemma ((p1 `star` p2) `equiv` (p2 `star` p1)) val star_associative (p1 p2 p3:vprop) : Lemma (((p1 `star` p2) `star` p3) `equiv` (p1 `star` (p2 `star` p3))) val star_congruence (p1 p2 p3 p4:vprop) : Lemma (requires p1 `equiv` p3 /\ p2 `equiv` p4) (ensures (p1 `star` p2) `equiv` (p3 
`star` p4)) /// Star induces a commutative monoid on vprops [@__steel_reduce__] inline_for_extraction noextract let rm : CE.cm vprop req = CE.CM emp star cm_identity star_associative star_commutative star_congruence (*** Vprop combinators ***) (* Refining a vprop with a selector predicate *) /// Separation logic predicate stating the validity of a vprop with an additional refinement on its selector val vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (slprop u#1) /// Exposing the validity of the above predicate when needed for proof purposes val interp_vrefine_hp (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: mem) : Lemma (interp (vrefine_hp v p) m <==> (interp (hp_of v) m /\ p (sel_of v m))) /// Selector type for a refined vprop [@__steel_reduce__] let vrefine_t (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot Type = (x: t_of v {p x}) /// Selector of a refined vprop. Returns a value which satisfies the refinement predicate val vrefine_sel (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot (selector (vrefine_t v p) (vrefine_hp v p)) /// Exposing the definition of the refined selector val vrefine_sel_eq (v: vprop) (p: (normal (t_of v) -> Tot prop)) (m: Mem.hmem (vrefine_hp v p)) : Lemma ( interp (hp_of v) m /\ vrefine_sel v p m == sel_of v m ) // [SMTPat ((vrefine_sel v p) m)] // FIXME: this pattern causes Z3 "wrong number of argument" errors /// Combining the above pieces to define a vprop refined by a selector prediacte [@__steel_reduce__] let vrefine' (v: vprop) (p: (normal (t_of v) -> Tot prop)) : Tot vprop' = { hp = vrefine_hp v p; t = vrefine_t v p; sel = vrefine_sel v p; } [@__steel_reduce__] let vrefine (v: vprop) (p: (normal (t_of v) -> Tot prop)) = VUnit (vrefine' v p) (* Dependent star for vprops *) /// Separation logic predicate corresponding to a dependent star, /// where the second predicate depends on the selector value of the first val vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (slprop u#1) /// Exposing the validity 
of the above predicate when needed for proof purposes val interp_vdep_hp (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: mem) : Lemma (interp (vdep_hp v p) m <==> (interp (hp_of v) m /\ interp (hp_of v `Mem.star` hp_of (p (sel_of v m))) m)) /// Helper to define the selector type of the second component of the dependent star let vdep_payload (v: vprop) (p: ( (t_of v) -> Tot vprop)) (x: t_of v) : Tot Type = t_of (p x) /// Selector type for the dependent star: A dependent tuple, where the second component's type depends on the first vprop let vdep_t (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot Type = dtuple2 (t_of v) (vdep_payload v p) /// Selector for the dependent star val vdep_sel (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot (selector (vdep_t v p) (vdep_hp v p)) /// Exposing the definition of the dependent star's selector when needed for proof purposes val vdep_sel_eq (v: vprop) (p: ( (t_of v) -> Tot vprop)) (m: Mem.hmem (vdep_hp v p)) : Lemma ( interp (hp_of v) m /\ begin let x = sel_of v m in interp (hp_of (p x)) m /\ vdep_sel v p m == (| x, sel_of (p x) m |) end ) /// Combining the elements above to create a dependent star vprop [@__steel_reduce__] let vdep' (v: vprop) (p: ( (t_of v) -> Tot vprop)) : Tot vprop' = { hp = vdep_hp v p; t = vdep_t v p; sel = vdep_sel v p; } [@__steel_reduce__] let vdep (v: vprop) (p: ( (t_of v) -> Tot vprop)) = VUnit (vdep' v p) (* Selector rewrite combinator *) /// The selector of a rewrite combinator applies a function `f` to the current selector of a vprop. 
val vrewrite_sel (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot (selector t (normal (hp_of v))) /// Exposing the definition of the above selector val vrewrite_sel_eq (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) (h: Mem.hmem (normal (hp_of v))) : Lemma ((vrewrite_sel v f <: selector' _ _) h == f ((normal (sel_of v) <: selector' _ _) h)) // [SMTPat (vrewrite_sel v f h)] // FIXME: this pattern causes Z3 "wrong number of argument" errors /// Combining the above elements to create a rewrite vprop [@__steel_reduce__] let vrewrite' (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop' = { hp = normal (hp_of v); t = t; sel = vrewrite_sel v f; } [@__steel_reduce__] let vrewrite (v: vprop) (#t: Type) (f: (normal (t_of v) -> GTot t)) : Tot vprop = VUnit (vrewrite' v f) (*** Framing tactic ***) (* Specialize visit_tm from the standard F* tactic library to reimplement name_appears_in. AF: As of Jan 14, 2021, calling name_appears_in from FStar.Tactics.Derived leads to a segfault *) exception Appears let on_sort_binder (f : term -> Tac unit) (b:binder) : Tac unit = f b.sort let rec visit_tm (ff : term -> Tac unit) (t : term) : Tac unit = let tv = inspect t in (match tv with | Tv_FVar _ | Tv_UInst _ _ | Tv_Var _ | Tv_BVar _ -> () | Tv_Type _ -> () | Tv_Const c -> () | Tv_Uvar i u -> () | Tv_Unsupp -> () | Tv_Unknown -> () | Tv_Arrow b c -> on_sort_binder ff b; visit_comp ff c | Tv_Abs b t -> let b = on_sort_binder (visit_tm ff) b in visit_tm ff t | Tv_App l (r, q) -> visit_tm ff l; visit_tm ff r | Tv_Refine b r -> on_sort_binder ff b; visit_tm ff r | Tv_Let r attrs b def t -> on_sort_binder ff b; visit_tm ff def; visit_tm ff t | Tv_Match sc _ brs -> visit_tm ff sc; iter (visit_br ff) brs | Tv_AscribedT e t topt _ -> visit_tm ff e; visit_tm ff t | Tv_AscribedC e c topt _ -> visit_tm ff e ); ff t and visit_br (ff : term -> Tac unit) (b:branch) : Tac unit = let (p, t) = b in visit_tm ff t and visit_comp (ff : term -> Tac unit) (c : comp) : Tac 
unit = let cv = inspect_comp c in match cv with | C_Total ret -> visit_tm ff ret | C_GTotal ret -> visit_tm ff ret | C_Lemma pre post pats -> visit_tm ff pre; visit_tm ff post; visit_tm ff pats | C_Eff us eff res args decrs -> visit_tm ff res; iter (fun (a, q) -> visit_tm ff a) args; iter (visit_tm ff) decrs /// Decides whether a top-level name [nm] syntactically /// appears in the term [t]. let name_appears_in (nm:name) (t:term) : Tac bool = let ff (t : term) : Tac unit = match inspect t with | Tv_FVar fv -> if inspect_fv fv = nm then raise Appears | t -> () in try ignore (visit_tm ff t); false with | Appears -> true | e -> raise e /// Checks whether term [t] appears in term [i] let term_appears_in (t:term) (i:term) : Tac bool = name_appears_in (explode_qn (term_to_string t)) i /// We define a small language to handle arbitrary separation logic predicates. /// Separation logic predicates are encoded as atoms for which equality is decidable, /// here represented as integers let atom : eqtype = int let rec atoms_to_string (l:list atom) = match l with | [] -> "" | hd::tl -> string_of_int hd ^ " " ^ atoms_to_string tl /// Reflecting the structure of our separation logic on atmos type exp : Type = | Unit : exp | Mult : exp -> exp -> exp | Atom : atom -> exp /// A map from atoms to the terms they represent. 
/// The second component of the term corresponds to a default element,
/// ensuring we never raise an exception when trying to access an element in the map
let amap (a:Type) = list (atom * a) * a

/// An empty atom map: The list map is empty; [xa] is the default element.
let const (#a:Type) (xa:a) : amap a = ([], xa)

/// Accessing an element in the atom map
// We reimplement List.Tot.Base.assoc because we need our tactic to normalize it,
// but we don't want to normalize user-provided instances
let rec my_assoc (#key: eqtype) (#value: Type) (k: key) (dict: list (key & value))
  : Pure (option value)
    (requires True)
    (ensures (fun res -> res == List.Tot.assoc k dict))
  = match dict with
  | [] -> None
  | (k', v') :: q -> if k = k' then Some v' else my_assoc k q

/// Look up atom [x] in [am]; returns the map's default element when absent.
let select (#a:Type) (x:atom) (am:amap a) : Tot a =
  match my_assoc #atom #a x (fst am) with
  | Some a -> a
  | _ -> snd am

/// Updating the atom map. Since select finds the first element corresponding to
/// the atom in the list and we do not have any remove function,
/// we can simply append the new element at the head without removing any possible
/// previous element
let update (#a:Type) (x:atom) (xa:a) (am:amap a) : amap a =
  (x, xa)::fst am, snd am

/// Check whether the current term is an unresolved vprop unification variable.
/// This can happen if either it is a uvar, or it is an unresolved dependent
/// vprop uvar which is applied to some argument
let is_uvar (t:term) : Tac bool = match inspect t with
  | Tv_Uvar _ _ -> true
  | Tv_App _ _ ->
      // application case: unresolved iff the head symbol is a uvar
      let hd, args = collect_app t in
      Tv_Uvar? (inspect hd)
  | _ -> false

/// For a given term t, collect all terms in the list l with the same head symbol
let rec get_candidates (t:term) (l:list term) : Tac (list term) =
  let name, _ = collect_app t in
  match l with
  | [] -> []
  | hd::tl ->
      let n, _ = collect_app hd in
      if term_eq_old n name then (
        hd::(get_candidates t tl)
      ) else get_candidates t tl

/// Try to remove a term that is exactly matching, not just that can be unified.
/// Returns whether a removal happened, together with the remaining list.
let rec trivial_cancel (t:atom) (l:list atom) =
  match l with
  | [] -> false, l
  | hd::tl ->
      if hd = t then
        // These elements match, we remove them
        true, tl
      else (let b, res = trivial_cancel t tl in b, hd::res)

/// Call trivial_cancel on all elements of l1.
/// The first two lists returned are the remainders of l1 and l2.
/// The last two lists are the removed parts of l1 and l2, with
/// the additional invariant that they are equal
let rec trivial_cancels (l1 l2:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom)
  = match l1 with
  | [] -> [], l2, [], []
  | hd::tl ->
      let b, l2' = trivial_cancel hd l2 in
      let l1', l2', l1_del, l2_del = trivial_cancels tl l2' am in
      (if b then l1' else hd::l1'), l2',
      (if b then hd::l1_del else l1_del),
      (if b then hd::l2_del else l2_del)

// Control-flow exceptions used to observe the outcome of [unify] without
// keeping the unification's side effects (see try_candidates below).
exception Failed
exception Success

/// Helper to print the terms corresponding to the current list of atoms
let rec print_atoms (l:list atom) (am:amap term) : Tac string =
  match l with
  | [] -> ""
  | [hd] -> term_to_string (select hd am)
  | hd::tl -> term_to_string (select hd am) ^ " * " ^ print_atoms tl am

/// For a list of candidates l, count the number that can unify with t.
/// Does not try to unify with a uvar, this will be done at the very end.
/// Tries to unify with slprops with a different head symbol, it might
/// be an abbreviation
/// Returns one unifiable candidate (if any) and the number of candidates found.
let rec try_candidates (t:atom) (l:list atom) (am:amap term) : Tac (atom * int) =
  match l with
  | [] -> t, 0
  | hd::tl ->
    if is_uvar (select hd am) then (try_candidates t tl am)
    else
      // Encapsulate unify in a try/with to ensure unification is not actually performed
      let res =
        try if unify (select t am) (select hd am) then raise Success else raise Failed
        with | Success -> true | _ -> false
      in
      let t', n' = try_candidates t tl am in
      if res && hd <> t' then hd, 1 + n' else t', n'

/// Remove the given term from the list. Only to be called when
/// try_candidates succeeded
let rec remove_from_list (t:atom) (l:list atom) : Tac (list atom) =
  match l with
  | [] -> fail "atom in remove_from_list not found: should not happen"; []
  | hd::tl -> if t = hd then tl else hd::remove_from_list t tl

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration,
/// and accumulates matched pairs into l1_del/l2_del.
let rec equivalent_lists_once (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom)
  = match l1 with
  | [] -> [], l2, l1_del, l2_del
  | hd::tl ->
    let t, n = try_candidates hd l2 am in
    if n = 1 then (
      // Unique candidate: commit to the match and recurse on the rest
      let l2 = remove_from_list t l2 in
      equivalent_lists_once tl l2 (hd::l1_del) (t::l2_del) am
    ) else (
      // Either too many candidates for this scrutinee, or no candidate but the uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once tl l2 l1_del l2_del am in
      hd::rem1, rem2, l1'_del, l2'_del
    )

/// Check if two lists of slprops are equivalent by recursively calling
/// try_candidates by iterating on l2.
/// Assumes that only l2 contains terms with the head symbol unresolved.
/// It returns all elements that were not resolved during this iteration.
/// This is very close to equivalent_lists_once above, but helps making progress
/// when l1 contains syntactically equal candidates
let rec equivalent_lists_once_l2 (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * list atom * list atom)
  = match l2 with
  | [] -> l1, [], l1_del, l2_del
  | hd::tl ->
    if is_uvar (select hd am) then
      // We do not try to match the vprop uvar
      let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
      rem1, hd::rem2, l1'_del, l2'_del
    else (
      let t, n = try_candidates hd l1 am in
      if n = 1 then (
        let l1 = remove_from_list t l1 in
        equivalent_lists_once_l2 l1 tl (t::l1_del) (hd::l2_del) am
      ) else (
        // Either too many candidates for this scrutinee, or no candidate but the uvar
        let rem1, rem2, l1'_del, l2'_del = equivalent_lists_once_l2 l1 tl l1_del l2_del am in
        rem1, hd::rem2, l1'_del, l2'_del
      )
    )

/// Term of the first atom in [l], or unit for an empty list (error-message helper).
let get_head (l:list atom) (am:amap term) : term = match l with
  | [] -> `()
  | hd::_ -> select hd am

/// Checks whether the list of atoms [l] only contains one unresolved uvar
let is_only_uvar (l:list atom) (am:amap term) : Tac bool =
  if List.Tot.Base.length l = 1 then is_uvar (select (List.Tot.Base.hd l) am)
  else false

/// Assumes that u is a uvar, checks that all variables in l can be unified with it.
/// Later in the tactic, the uvar will be unified to a star of l *) let rec try_unifying_remaining (l:list atom) (u:term) (am:amap term) : Tac unit = match l with | [] -> () | hd::tl -> try if unify u (select hd am) then raise Success else raise Failed with | Success -> try_unifying_remaining tl u am | _ -> fail ("could not find candidate for scrutinee " ^ term_to_string (select hd am)) /// Is SMT rewriting enabled for this binder let is_smt_binder (b:binder) : Tac bool = let l = b.attrs in not (List.Tot.isEmpty (filter (fun t -> is_fvar t (`%smt_fallback)) l)) /// Creates a new term, where all arguments where SMT rewriting is enabled have been replaced /// by fresh, unconstrained unification variables let rec new_args_for_smt_attrs (env:env) (l:list argv) (ty:typ) : Tac (list argv * list term) = let fresh_ghost_uvar ty = let e = cur_env () in ghost_uvar_env e ty in match l, inspect_unascribe ty with | (arg, aqualv)::tl, Tv_Arrow binder comp -> let needs_smt = is_smt_binder binder in let new_hd = if needs_smt then ( let arg_ty = tc env arg in let uvar = fresh_ghost_uvar arg_ty in unshelve uvar; flip (); (uvar, aqualv) ) else (arg, aqualv) in begin let ty2 = match inspect_comp comp with | C_Total ty2 -> ty2 | C_Eff _ eff_name ty2 _ _ -> if eff_name = ["Prims"; "Tot"] then ty2 else fail "computation type not supported in definition of slprops" | _ -> fail "computation type not supported in definition of slprops" in let tl_argv, tl_terms = new_args_for_smt_attrs env tl ty2 in new_hd::tl_argv, (if needs_smt then arg::tl_terms else tl_terms) end | [], Tv_FVar fv -> [], [] | _ -> fail "should not happen. Is an slprop partially applied?" 
/// Rewrites all terms in the context to enable SMT rewriting through the use of fresh,
/// unconstrained unification variables.
/// The accumulator pairs the current atom map with the original argument terms
/// replaced so far (they will become SMT guards).
let rewrite_term_for_smt (env:env) (am:amap term * list term) (a:atom)
  : Tac (amap term * list term)
  = let am, prev_uvar_terms = am in
    let term = select a am in
    let hd, args = collect_app term in
    let t = tc env hd in
    let new_args, uvar_terms = new_args_for_smt_attrs env args t in
    let new_term = mk_app hd new_args in
    update a new_term am, List.Tot.append uvar_terms prev_uvar_terms

/// User-facing error message when the framing tactic fails
let fail_atoms (#a:Type) (l1 l2:list atom) (am:amap term) : Tac a
  = fail ("could not find a solution for unifying\n" ^ print_atoms l1 am ^ "\nand\n" ^ print_atoms l2 am)

/// Variant of equivalent_lists' below to be called once terms have been rewritten
/// to allow SMT rewriting.
/// If unification succeeds and we have unicity of the solution, this tactic will succeed,
/// and ultimately create an SMT guard that the two terms are actually equal.
/// [n] is the length of [l1] at the previous iteration, used to detect lack of progress.
let rec equivalent_lists_fallback (n:nat) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false)
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can be therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then
        // Should always be smaller or equal to n
        // If it is equal, no progress was made.
        fail_atoms rem1 rem2 am
      else equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' am

/// Iterates over all terms in [l2] to prepare them for unification with SMT rewriting
let replace_smt_uvars (l1 l2:list atom) (am:amap term) : Tac (amap term * list term)
  = let env = cur_env () in
    fold_left (rewrite_term_for_smt env) (am, []) l2

/// Recursively calls equivalent_lists_once.
/// Stops when we're done with unification, or when we didn't make any progress
/// If we didn't make any progress, we have too many candidates for some terms.
/// Accumulates rewritings of l1 and l2 in l1_del and l2_del, with the invariant
/// that the two lists are unifiable at any point
/// The boolean indicates if there is a leftover empty frame
let rec equivalent_lists' (n:nat) (use_smt:bool) (l1 l2 l1_del l2_del:list atom) (am:amap term)
  : Tac (list atom * list atom * bool * list term) =
  match l1 with
  | [] -> begin match l2 with
    | [] -> (l1_del, l2_del, false, [])
    | [hd] ->
      // Succeed if there is only one uvar left in l2, which can be therefore
      // be unified with emp
      if is_uvar (select hd am) then (
        // xsdenote is left associative: We put hd at the top to get
        // ?u `star` p <==> emp `star` p
        (l1_del, hd :: l2_del, true, []))
      else fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    | _ -> fail ("could not find candidates for " ^ term_to_string (get_head l2 am))
    end
  | _ ->
    if is_only_uvar l2 am then (
      // Terms left in l1, but only a uvar left in l2.
      // Put all terms left at the end of l1_rem, so that they can be unified
      // with exactly the uvar because of the structure of xsdenote
      try_unifying_remaining l1 (get_head l2 am) am;
      l1_del `List.Tot.append` l1, l2_del `List.Tot.append` l2, false, []
    ) else
      let rem1, rem2, l1_del', l2_del' = equivalent_lists_once l1 l2 l1_del l2_del am in
      let n' = List.Tot.length rem1 in
      if n' >= n then (
        // Try to make progress by matching non-uvars of l2 with candidates in l1
        let rem1, rem2, l1_del', l2_del' = equivalent_lists_once_l2 rem1 rem2 l1_del' l2_del' am in
        let n' = List.Tot.length rem1 in
        if n' >= n then (
          // Should always be smaller or equal to n
          // If it is equal, no progress was made.
          if use_smt then
            // SMT fallback is allowed
            let new_am, uvar_terms = replace_smt_uvars rem1 rem2 am in
            let l1_f, l2_f, b = equivalent_lists_fallback n' rem1 rem2 l1_del' l2_del' new_am in
            l1_f, l2_f, b, uvar_terms
          else
            fail_atoms rem1 rem2 am
        ) else
          equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am
      ) else
        equivalent_lists' n' use_smt rem1 rem2 l1_del' l2_del' am

/// Checks if term for atom t unifies with all uvars in l
let rec unifies_with_all_uvars (t:term) (l:list atom) (am:amap term) : Tac bool =
  match l with
  | [] -> true
  | hd::tl ->
    if unifies_with_all_uvars t tl am then (
      // Unified with tail, try this term
      let hd_t = select hd am in
      if is_uvar hd_t then (
        // The head term is a uvar, try unifying
        try if unify t hd_t then raise Success else raise Failed
        with | Success -> true | _ -> false
      ) else true // The uvar is not a head term, we do not need to try it
    ) else false

/// Puts all terms in l1 that cannot unify with the uvars in l2 at the top:
/// They need to be solved first
let rec most_restricted_at_top (l1 l2:list atom) (am:amap term) : Tac (list atom) =
  match l1 with
  | [] -> []
  | hd::tl ->
    if unifies_with_all_uvars (select hd am) l2 am
    then (most_restricted_at_top tl l2 am) `List.Tot.append` [hd]
    else hd::(most_restricted_at_top tl l2 am)

/// Core AC-unification tactic.
/// First remove all trivially equal terms, then try to decide equivalence. /// Assumes that l1 does not contain any vprop uvar. /// If it succeeds, returns permutations of l1, l2, and a boolean indicating /// if l2 has a trailing empty frame to be unified let equivalent_lists (use_smt:bool) (l1 l2:list atom) (am:amap term) : Tac (list atom * list atom * bool * list term) = let l1, l2, l1_del, l2_del = trivial_cancels l1 l2 am in let l1 = most_restricted_at_top l1 l2 am in let n = List.Tot.length l1 in let l1_del, l2_del, emp_frame, uvar_terms = equivalent_lists' n use_smt l1 l2 l1_del l2_del am in l1_del, l2_del, emp_frame, uvar_terms (* Helpers to relate the actual terms to their representation as a list of atoms *) open FStar.Reflection.V2.Derived.Lemmas let rec list_to_string (l:list term) : Tac string = match l with | [] -> "end" | hd::tl -> term_to_string hd ^ " " ^ list_to_string tl let rec mdenote_gen (#a:Type u#aa) (unit:a) (mult:a -> a -> a) (am:amap a) (e:exp) : a = match e with | Unit -> unit | Atom x -> select x am | Mult e1 e2 -> mult (mdenote_gen unit mult am e1) (mdenote_gen unit mult am e2) let rec xsdenote_gen (#a:Type) (unit:a) (mult:a -> a -> a) (am:amap a) (xs:list atom) : a = match xs with | [] -> unit | [x] -> select x am | x::xs' -> mult (select x am) (xsdenote_gen unit mult am xs') unfold let mdenote (#a:Type u#aa) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp) : a = let open FStar.Algebra.CommMonoid.Equiv in mdenote_gen (CM?.unit m) (CM?.mult m) am e unfold let xsdenote (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom) : a = let open FStar.Algebra.CommMonoid.Equiv in xsdenote_gen (CM?.unit m) (CM?.mult m) am xs // We reimplement List.Tot.Base.append because we need our tactic to normalize it, // but we don't want to normalize user-provided instances let rec my_append (#t: Type) (l1 l2: list t) : Pure (list t) (requires True) (ensures (fun res -> res == l1 `List.Tot.append` l2)) (decreases l1) = match l1 with | [] -> 
l2 | a :: q -> a :: my_append q l2 let rec flatten (e:exp) : list atom = match e with | Unit -> [] | Atom x -> [x] | Mult e1 e2 -> flatten e1 `my_append` flatten e2 let rec flatten_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs1 xs2:list atom) : Lemma (xsdenote eq m am (xs1 `my_append` xs2) `CE.EQ?.eq eq` CE.CM?.mult m (xsdenote eq m am xs1) (xsdenote eq m am xs2)) = let open FStar.Algebra.CommMonoid.Equiv in match xs1 with | [] -> CM?.identity m (xsdenote eq m am xs2); EQ?.symmetry eq (CM?.mult m (CM?.unit m) (xsdenote eq m am xs2)) (xsdenote eq m am xs2) | [x] -> ( if (Nil? xs2) then (right_identity eq m (select x am); EQ?.symmetry eq (CM?.mult m (select x am) (CM?.unit m)) (select x am)) else EQ?.reflexivity eq (CM?.mult m (xsdenote eq m am [x]) (xsdenote eq m am xs2))) | x::xs1' -> flatten_correct_aux eq m am xs1' xs2; EQ?.reflexivity eq (select x am); CM?.congruence m (select x am) (xsdenote eq m am (xs1' `my_append` xs2)) (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)); CM?.associativity m (select x am) (xsdenote eq m am xs1') (xsdenote eq m am xs2); EQ?.symmetry eq (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2)) (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2))); EQ?.transitivity eq (CM?.mult m (select x am) (xsdenote eq m am (xs1' `my_append` xs2))) (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2))) (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2)) let rec flatten_correct (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e:exp) : Lemma (mdenote eq m am e `CE.EQ?.eq eq` xsdenote eq m am (flatten e)) = let open FStar.Algebra.CommMonoid.Equiv in match e with | Unit -> EQ?.reflexivity eq (CM?.unit m) | Atom x -> EQ?.reflexivity eq (select x am) | Mult e1 e2 -> flatten_correct_aux eq m am (flatten e1) (flatten e2); EQ?.symmetry eq (xsdenote eq m am (flatten e1 
`my_append` flatten e2)) (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2))); flatten_correct eq m am e1; flatten_correct eq m am e2; CM?.congruence m (mdenote eq m am e1) (mdenote eq m am e2) (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)); EQ?.transitivity eq (CM?.mult m (mdenote eq m am e1) (mdenote eq m am e2)) (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2))) (xsdenote eq m am (flatten e1 `my_append` flatten e2)) let monoid_reflect (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (e1 e2:exp) (_ : squash (xsdenote eq m am (flatten e1) `CE.EQ?.eq eq` xsdenote eq m am (flatten e2))) : squash (mdenote eq m am e1 `CE.EQ?.eq eq` mdenote eq m am e2) = flatten_correct eq m am e1; flatten_correct eq m am e2; CE.EQ?.symmetry eq (mdenote eq m am e2) (xsdenote eq m am (flatten e2)); CE.EQ?.transitivity eq (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)) (mdenote eq m am e2); CE.EQ?.transitivity eq (mdenote eq m am e1) (xsdenote eq m am (flatten e1)) (mdenote eq m am e2) // Here we sort the variable numbers // We reimplement List.Tot.Base.sortWith because we need our tactic to normalize it, // but we don't want to normalize user-provided instances let rec my_partition (#a: Type) (f: (a -> Tot bool)) (l: list a) : Pure (list a & list a) (requires True) (ensures (fun res -> res == List.Tot.partition f l)) = match l with | [] -> [], [] | hd::tl -> let l1, l2 = my_partition f tl in if f hd then hd::l1, l2 else l1, hd::l2 let rec partition_ext (#a: Type) (f1 f2: (a -> Tot bool)) (l: list a) : Lemma (requires (forall x . 
f1 x == f2 x)) (ensures (List.Tot.partition f1 l == List.Tot.partition f2 l)) = match l with | [] -> () | hd::tl -> partition_ext f1 f2 tl let my_bool_of_compare (#a: Type) (f: a -> a -> Tot int) (x: a) (y: a) : Tot bool = f x y < 0 let rec my_sortWith (#a: Type) (f: (a -> a -> Tot int)) (l:list a) : Pure (list a) (requires True) (ensures (fun res -> res == List.Tot.sortWith f l)) (decreases (List.Tot.length l)) = match l with | [] -> [] | pivot::tl -> let hi, lo = my_partition (my_bool_of_compare f pivot) tl in partition_ext (my_bool_of_compare f pivot) (List.Tot.bool_of_compare f pivot) tl; List.Tot.partition_length (List.Tot.bool_of_compare f pivot) tl; my_append (my_sortWith f lo) (pivot::my_sortWith f hi) let rec sortWith_ext (#a: Type) (f1 f2: (a -> a -> Tot int)) (l: list a) : Lemma (requires (forall x y . f1 x y == f2 x y)) (ensures (List.Tot.sortWith f1 l == List.Tot.sortWith f2 l)) (decreases (List.Tot.length l)) = match l with | [] -> () | pivot::tl -> partition_ext (List.Tot.bool_of_compare f1 pivot) (List.Tot.bool_of_compare f2 pivot) tl; List.Tot.partition_length (List.Tot.bool_of_compare f1 pivot) tl; let hi, lo = List.Tot.partition (List.Tot.bool_of_compare f1 pivot) tl in sortWith_ext f1 f2 lo; sortWith_ext f1 f2 hi let permute = list atom -> list atom let my_compare_of_bool (#a:eqtype) (rel: a -> a -> Tot bool) (x: a) (y: a) : Tot int = if x `rel` y then -1 else if x = y then 0 else 1 let sort : permute = my_sortWith #int (my_compare_of_bool (<)) #push-options "--fuel 1 --ifuel 1" let lemma_xsdenote_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (hd:atom) (tl:list atom) : Lemma (xsdenote eq m am (hd::tl) `CE.EQ?.eq eq` (CE.CM?.mult m (select hd am) (xsdenote eq m am tl))) = let open FStar.Algebra.CommMonoid.Equiv in match tl with | [] -> assert (xsdenote eq m am (hd::tl) == select hd am); CM?.identity m (select hd am); EQ?.symmetry eq (CM?.unit m `CM?.mult m` select hd am) (select hd am); CM?.commutativity m (CM?.unit m) (select hd am); 
EQ?.transitivity eq (xsdenote eq m am (hd::tl)) (CM?.unit m `CM?.mult m` select hd am) (CM?.mult m (select hd am) (xsdenote eq m am tl)) | _ -> EQ?.reflexivity eq (xsdenote eq m am (hd::tl)) let rec partition_equiv (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (pivot:atom) (q:list atom) : Lemma (let open FStar.List.Tot.Base in let hi, lo = partition (bool_of_compare (compare_of_bool (<)) pivot) q in CE.EQ?.eq eq (xsdenote eq m am hi `CE.CM?.mult m` xsdenote eq m am lo) (xsdenote eq m am q)) = let open FStar.Algebra.CommMonoid.Equiv in let open FStar.List.Tot.Base in let f = bool_of_compare (compare_of_bool (<)) pivot in let hi, lo = partition f q in match q with | [] -> CM?.identity m (xsdenote eq m am hi) | hd::tl -> let l1, l2 = partition f tl in partition_equiv eq m am pivot tl; assert (EQ?.eq eq (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2) (xsdenote eq m am tl)); EQ?.reflexivity eq (xsdenote eq m am l1); EQ?.reflexivity eq (xsdenote eq m am l2); EQ?.reflexivity eq (xsdenote eq m am hi); EQ?.reflexivity eq (xsdenote eq m am lo); if f hd then begin assert (hi == hd::l1 /\ lo == l2); lemma_xsdenote_aux eq m am hd l1; CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo) (select hd am `CM?.mult m` xsdenote eq m am l1) (xsdenote eq m am l2); CM?.associativity m (select hd am) (xsdenote eq m am l1) (xsdenote eq m am l2); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) ((select hd am `CM?.mult m` xsdenote eq m am l1) `CM?.mult m` xsdenote eq m am l2) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)); EQ?.reflexivity eq (select hd am); CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2) (select hd am) (xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)) (select hd am `CM?.mult m` xsdenote eq m am tl); lemma_xsdenote_aux eq m 
am hd tl; EQ?.symmetry eq (xsdenote eq m am (hd::tl)) (select hd am `CM?.mult m` xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` xsdenote eq m am tl) (xsdenote eq m am (hd::tl)) end else begin assert (hi == l1 /\ lo == hd::l2); lemma_xsdenote_aux eq m am hd l2; CM?.congruence m (xsdenote eq m am hi) (xsdenote eq m am lo) (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2); CM?.commutativity m (xsdenote eq m am l1) (select hd am `CM?.mult m` xsdenote eq m am l2); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (xsdenote eq m am l1 `CM?.mult m` (select hd am `CM?.mult m` xsdenote eq m am l2)) ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1); CM?.associativity m (select hd am) (xsdenote eq m am l2) (xsdenote eq m am l1); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) ((select hd am `CM?.mult m` xsdenote eq m am l2) `CM?.mult m` xsdenote eq m am l1) (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1)); CM?.commutativity m (xsdenote eq m am l2) (xsdenote eq m am l1); EQ?.reflexivity eq (select hd am); CM?.congruence m (select hd am) (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1) (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` (xsdenote eq m am l2 `CM?.mult m` xsdenote eq m am l1)) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)); CM?.congruence m (select hd am) (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2) (select hd am) (xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` (xsdenote eq m am l1 `CM?.mult m` xsdenote eq m am l2)) (select hd am `CM?.mult m` xsdenote eq m am tl); lemma_xsdenote_aux eq m am hd tl; 
EQ?.symmetry eq (xsdenote eq m am (hd::tl)) (select hd am `CM?.mult m` xsdenote eq m am tl); EQ?.transitivity eq (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select hd am `CM?.mult m` xsdenote eq m am tl) (xsdenote eq m am (hd::tl)) end let rec sort_correct_aux (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (xs:list atom) : Lemma (requires True) (ensures xsdenote eq m am xs `CE.EQ?.eq eq` xsdenote eq m am (sort xs)) (decreases (FStar.List.Tot.Base.length xs)) = let open FStar.Algebra.CommMonoid.Equiv in match xs with | [] -> EQ?.reflexivity eq (xsdenote eq m am []) | pivot::q -> let sort0 : permute = List.Tot.sortWith #int (List.Tot.compare_of_bool (<)) in let sort_eq (l: list atom) : Lemma (sort l == sort0 l) [SMTPat (sort l)] = sortWith_ext (my_compare_of_bool (<)) (List.Tot.compare_of_bool (<)) l in let open FStar.List.Tot.Base in let f:int -> int -> int = compare_of_bool (<) in let hi, lo = partition (bool_of_compare f pivot) q in flatten_correct_aux eq m am (sort lo) (pivot::sort hi); assert (xsdenote eq m am (sort xs) `EQ?.eq eq` CM?.mult m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi))); lemma_xsdenote_aux eq m am pivot (sort hi); EQ?.reflexivity eq (xsdenote eq m am (sort lo)); CM?.congruence m (xsdenote eq m am (sort lo)) (xsdenote eq m am (pivot::sort hi)) (xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (xsdenote eq m am (sort lo) `CM?.mult m` xsdenote eq m am (pivot::sort hi)) (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)))); CM?.commutativity m (xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` xsdenote eq m am (sort hi)); CM?.associativity m (select pivot am) (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo)); 
EQ?.transitivity eq (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))) ((select pivot am `CM?.mult m` xsdenote eq m am (sort hi)) `CM?.mult m` xsdenote eq m am (sort lo)) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (xsdenote eq m am (sort lo) `CM?.mult m` (select pivot am `CM?.mult m` xsdenote eq m am (sort hi))) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)))); partition_length (bool_of_compare f pivot) q; sort_correct_aux eq m am hi; sort_correct_aux eq m am lo; EQ?.symmetry eq (xsdenote eq m am lo) (xsdenote eq m am (sort lo)); EQ?.symmetry eq (xsdenote eq m am hi) (xsdenote eq m am (sort hi)); CM?.congruence m (xsdenote eq m am (sort hi)) (xsdenote eq m am (sort lo)) (xsdenote eq m am hi) (xsdenote eq m am lo); assert (EQ?.eq eq (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)); EQ?.reflexivity eq (select pivot am); CM?.congruence m (select pivot am) (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo)) (select pivot am) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am (sort hi) `CM?.mult m` xsdenote eq m am (sort lo))) (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo))); partition_equiv eq m am pivot q; CM?.congruence m (select pivot am) (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo) (select pivot am) (xsdenote eq m am q); EQ?.transitivity eq 
(xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am hi `CM?.mult m` xsdenote eq m am lo)) (select pivot am `CM?.mult m` (xsdenote eq m am q)); assert (EQ?.eq eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am q))); lemma_xsdenote_aux eq m am pivot q; EQ?.symmetry eq (xsdenote eq m am (pivot::q)) (select pivot am `CM?.mult m` (xsdenote eq m am q)); EQ?.transitivity eq (xsdenote eq m am (sort xs)) (select pivot am `CM?.mult m` (xsdenote eq m am q)) (xsdenote eq m am xs); EQ?.symmetry eq (xsdenote eq m am (sort xs)) (xsdenote eq m am xs) #pop-options #push-options "--fuel 0 --ifuel 0" (* Lemmas to be called after a permutation compatible with AC-unification was found *) let smt_reflexivity (#a:Type) (eq:CE.equiv a) (x y:a) : Lemma (requires x == y) (ensures CE.EQ?.eq eq x y) = CE.EQ?.reflexivity eq x let identity_left_smt (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a) : Lemma (requires x == y) (ensures CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) y)) = CE.CM?.identity m x; CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x let identity_left (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x:a) : Lemma (CE.EQ?.eq eq x (CE.CM?.mult m (CE.CM?.unit m) x)) = CE.CM?.identity m x; CE.EQ?.symmetry eq (CE.CM?.mult m (CE.CM?.unit m) x) x let identity_right_diff (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (x y:a) : Lemma (requires CE.EQ?.eq eq x y) (ensures CE.EQ?.eq eq (CE.CM?.mult m x (CE.CM?.unit m)) y) = CE.right_identity eq m x; CE.EQ?.transitivity eq (CE.CM?.mult m x (CE.CM?.unit m)) x y /// Dismiss possible vprops goals that might have been created by lemma application. 
/// These vprops will be instantiated at a later stage; else, Meta-F* will raise an error let rec dismiss_slprops () : Tac unit = match term_as_formula' (cur_goal ()) with | App t _ -> if is_fvar t (`%squash) then () else (dismiss(); dismiss_slprops ()) | _ -> dismiss(); dismiss_slprops () /// Recursively removing trailing empty assertions let rec n_identity_left (n:int) (eq m:term) : Tac unit = if n = 0 then ( apply_lemma (`(CE.EQ?.reflexivity (`#eq))); // Cleaning up, in case a uvar has been generated here. It'll be solved later set_goals []) else ( apply_lemma (`identity_right_diff (`#eq) (`#m)); // Drop the slprops generated, they will be solved later dismiss_slprops (); n_identity_left (n-1) eq m ) /// Helper lemma: If two vprops (as represented by lists of atoms) are equivalent, then their canonical forms /// (corresponding to applying the sort function on atoms) are equivalent let equivalent_sorted (#a:Type) (eq:CE.equiv a) (m:CE.cm a eq) (am:amap a) (l1 l2 l1' l2':list atom) : Lemma (requires sort l1 == sort l1' /\ sort l2 == sort l2' /\ xsdenote eq m am l1 `CE.EQ?.eq eq` xsdenote eq m am l2) (ensures xsdenote eq m am l1' `CE.EQ?.eq eq` xsdenote eq m am l2') = let open FStar.Algebra.CommMonoid.Equiv in sort_correct_aux eq m am l1'; sort_correct_aux eq m am l1; EQ?.symmetry eq (xsdenote eq m am l1) (xsdenote eq m am (sort l1)); EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am (sort l1')) (xsdenote eq m am l1); EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am l1) (xsdenote eq m am l2); sort_correct_aux eq m am l2; EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am l2) (xsdenote eq m am (sort l2)); sort_correct_aux eq m am l2'; EQ?.symmetry eq (xsdenote eq m am l2') (xsdenote eq m am (sort l2')); EQ?.transitivity eq (xsdenote eq m am l1') (xsdenote eq m am (sort l2)) (xsdenote eq m am l2') #pop-options /// Finds the position of first occurrence of x in xs. /// This is now specialized to terms and their funny term_eq_old. 
/// Returns [Some i] where [i] is the index of the first occurrence of [x]
/// in [xs] (offset by the accumulator [n]), or [None] if absent.
let rec where_aux (n:nat) (x:term) (xs:list term) :
    Tac (option nat) (decreases xs) =
  match xs with
  | [] -> None
  | x'::xs' -> if term_eq_old x x' then Some n else where_aux (n+1) x xs'

let where = where_aux 0

/// Turns term [t] into an atom: reuses its existing index in [ts] if it was
/// already seen, otherwise appends it (normalized) with a fresh index and
/// records it in the atom map [am].
let fatom (t:term) (ts:list term) (am:amap term) : Tac (exp * list term * amap term) =
  match where t ts with
  | Some v -> (Atom v, ts, am)
  | None ->
    let vfresh = List.Tot.Base.length ts in
    let t = norm_term [iota; zeta] t in
    (Atom vfresh, ts `List.Tot.append` [t], update vfresh t am)

/// Transforming a term into the corresponding list of atoms
/// If the atomic terms were already present in the map [am], then
/// they correspond to the same atoms
/// This expects that mult, unit, and t have already been normalized
let rec reification_aux (ts:list term) (am:amap term)
                        (mult unit t : term) : Tac (exp * list term * amap term) =
  let hd, tl = collect_app_ref t in
  match inspect_unascribe hd, List.Tot.Base.list_unref tl with
  | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] ->
    // A binary application of the monoid's mult becomes a Mult node;
    // anything else is treated as an opaque atom
    if term_eq_old (pack (Tv_FVar fv)) mult
    then (let (e1, ts, am) = reification_aux ts am mult unit t1 in
          let (e2, ts, am) = reification_aux ts am mult unit t2 in
          (Mult e1 e2, ts, am))
    else fatom t ts am
  | _, _ ->
    if term_eq_old t unit
    then (Unit, ts, am)
    else fatom t ts am

/// Performs the required normalization before calling the function above
let reification (eq: term) (m: term) (ts:list term) (am:amap term) (t:term) :
    Tac (exp * list term * amap term) =
  let mult = norm_term [iota; zeta; delta] (`CE.CM?.mult (`#m)) in
  let unit = norm_term [iota; zeta; delta] (`CE.CM?.unit (`#m)) in
  let t    = norm_term [iota; zeta] t in
  reification_aux ts am mult unit t

/// Meta-F* internal: Transforms the atom map into a term
let rec convert_map (m : list (atom * term)) : term =
  match m with
  | [] -> `[]
  | (a, t)::ps ->
      let a = pack (Tv_Const (C_Int a)) in
      (* let t = norm_term [delta] t in *)
      `((`#a, (`#t)) :: (`#(convert_map ps)))

/// `am` is an amap (basically a list) of terms, each representing a value
/// of type `a` (whichever we are canonicalizing). This function converts
/// `am` into a single `term` of type `amap a`, suitable to call `mdenote` with
let convert_am (am : amap term) : term =
  let (map, def) = am in
  (* let def = norm_term [delta] def in *)
  `( (`#(convert_map map), `#def) )

/// Transforms a term representation into a term through quotation
let rec quote_exp (e:exp) : term =
    match e with
    | Unit -> (`Unit)
    | Mult e1 e2 -> (`Mult (`#(quote_exp e1)) (`#(quote_exp e2)))
    | Atom n -> let nt = pack (Tv_Const (C_Int n)) in
                (`Atom (`#nt))

/// Quotes a list of atoms (integers) into a term representing that list.
let rec quote_atoms (l:list atom) =
  match l with
  | [] -> `[]
  | hd::tl -> let nt = pack (Tv_Const (C_Int hd)) in
              (`Cons (`#nt) (`#(quote_atoms tl)))

/// Some internal normalization steps to make reflection of vprops into atoms and atom permutation go smoothly.
/// We reimplemented sorting/list functions to normalize our uses without normalizing those introduced by the user.
let normal_tac_steps = [primops; iota; zeta; delta_only [
          `%mdenote; `%select;
          `%my_assoc; `%my_append;
          `%flatten; `%sort;
          `%my_sortWith; `%my_partition;
          `%my_bool_of_compare; `%my_compare_of_bool;
          `%fst; `%__proj__Mktuple2__item___1;
          `%snd; `%__proj__Mktuple2__item___2;
          `%CE.__proj__CM__item__unit;
          `%CE.__proj__CM__item__mult;
          `%rm]]

/// The normalization function, using the above normalization steps
let normal_tac (#a:Type) (x:a) : a = FStar.Pervasives.norm normal_tac_steps x

/// Helper lemma to establish relation between normalized and initial values
let normal_elim (x:Type0) : Lemma
  (requires x)
  (ensures normal_tac x)
  = ()

// Used to abort [equivalent_lists] early and carry its result out of a try/with
// (see canon_l_r below)
exception Result of list atom * list atom * bool * list term

/// F* equalities are typed, but the generated type sometimes is a unification variable.
/// This helper ensures that such unification variables are not left unresolved, which would lead to an error
let close_equality_typ' (t:term) : Tac unit =
  let f = term_as_formula' t in
  match f with
  | Comp (Eq (Some u)) l _ ->
    // If the equality's type annotation is still a uvar, solve it with the
    // inferred type of the left-hand side
    if is_uvar u then (unshelve u; exact_with_ref (tc (cur_env()) l))
  | _ -> ()

/// Recursively closing equality types in a given term (usually a unification constraint)
let close_equality_typ (t:term) : Tac unit =
  visit_tm close_equality_typ' t

/// Core unification tactic.
/// Transforms terms into their atom representations,
/// Tries to find a solution to AC-unification, and if so,
/// soundly permutes the atom representations before calling the unifier
/// to check the validity of the provided solution.
/// In the case where SMT rewriting was needed, equalities abduction is performed by instantiating the
/// abduction prop unification variable with the corresponding guard
/// 09/24:
///
/// The tactic internally builds a map from atoms to terms
/// and uses the map for reflecting the goal to atoms representation
/// During reflection, the tactics engine typechecks the amap, and hence all
/// the terms again
/// This typechecking of terms is unnecessary, since the terms are coming
/// from the goal, and hence are already well-typed
/// Worse, re-typechecking them may generate a lot of SMT queries
/// And even worse, the SMT queries are discharged in the static context,
/// requiring various workarounds (e.g. squash variables for if conditions etc.)
///
/// To fix this, we now "name" the terms and use the amap with names
///
/// Read through the canon_l_r function for how we do this

/// The following three lemmas are helpers to manipulate the goal in canon_l_r

[@@ no_subtyping]
let inst_bv (#a:Type) (#p:a -> Type0) (#q:Type0) (x:a) (_:squash (p x ==> q))
  : Lemma ((forall (x:a). p x) ==> q) = ()

let modus_ponens (#p #q:Type0) (_:squash p)
  : Lemma ((p ==> q) ==> q)
  = ()

let cut (p q:Type0) : Lemma (requires p /\ (p ==> q)) (ensures q) = ()

let and_true (p: Type0) : Lemma (requires (p /\ (p ==> True))) (ensures p) = ()

let solve_implies_true (p: Type0) : Lemma (p ==> True) = ()

// This exception is raised for failures that should not be considered
// hard but should allow postponing the goal instead
exception Postpone of string

(* NOTE! Redefining boolean disjunction to *not* be short-circuiting,
   since we cannot use an effectful result as argument of Prims.op_BarBar *)
private let bor = op_BarBar

private let is_and (t:term) : bool =
  is_any_fvar t [`%(/\); `%prop_and]

private let is_squash (t:term) : bool =
  is_any_fvar t [`%squash; `%auto_squash]

private let is_star (t:term) : bool =
  is_any_fvar t [`%star; `%VStar]

private let is_star_or_unit (t:term) : bool =
  is_any_fvar t [`%star; `%VStar; `%VUnit]

/// Recursively unifies every uvar conjunct of the SMT prop [pr] with True.
/// Raises [Postpone] (not a hard failure) if unresolved uvars remain that
/// other goals may still solve.
let rec unify_pr_with_true (pr: term) : Tac unit =
  let hd, tl = collect_app pr in
  if is_and hd
  then
    match tl with
    | [pr_l, _; pr_r, _] ->
      unify_pr_with_true pr_l;
      unify_pr_with_true pr_r
    | _ -> fail "unify_pr_with_true: ill-formed /\\"
  else
    match inspect_unascribe hd with
    | Tv_Uvar _ _ ->
      if unify pr (`true_p)
      then ()
      else begin
        fail "unify_pr_with_true: could not unify SMT prop with True"
      end
    | _ ->
      if List.Tot.length (free_uvars pr) = 0
      then ()
      else
        // postpone the goal instead of failing hard, to allow for other goals to solve those uvars
        raise (Postpone "unify_pr_with_true: some uvars are still there")

/// Eliminates the left conjunct: from a proof of [goal] under [a] and a
/// proof of [a /\ b], produces a squashed proof of [goal].
let elim_and_l_squash (#a #b: Type0) (#goal: Type0) (f: (a -> Tot (squash goal)))
                      (h: (a /\ b)) : Tot (squash goal) =
  let f' (x: squash a) : Tot (squash goal) =
    FStar.Squash.bind_squash x f
  in
  let elim_impl (x: squash (a /\ b)) : Tot (squash a) = () in
  f' (elim_impl (FStar.Squash.return_squash h))

/// Symmetric to [elim_and_l_squash], eliminating the right conjunct.
let elim_and_r_squash (#a #b: Type0) (#goal: Type0) (f: (b -> Tot (squash goal)))
                      (h: (a /\ b)) : Tot
(squash goal) =
  FStar.Squash.bind_squash x f
  in
  let elim_impl (x: squash (a /\ b)) : Tot (squash b) = () in
  f' (elim_impl (FStar.Squash.return_squash h))

// Thunked wrapper around return_squash, used as the leaf case when
// instantiating the abduction variable below
let _return_squash (#a: Type) () (x: a) : Tot (squash a) =
  FStar.Squash.return_squash x

/// Builds the term that instantiates the abduction prop variable: walks the
/// /\-structure of [pr], descending into whichever conjunct still contains
/// uvars, and wraps the recursion in elim_and_{l,r}_squash accordingly.
/// Raises [Postpone] when uvars remain on both sides.
let rec set_abduction_variable_term (pr: term) : Tac term =
  let hd, tl = collect_app pr in
  if is_and hd
  then
    match tl with
    | (pr_l, Q_Explicit) :: (pr_r, Q_Explicit) :: [] ->
      if List.Tot.length (free_uvars pr_r) = 0
      then
        let arg = set_abduction_variable_term pr_l in
        mk_app (`elim_and_l_squash) [arg, Q_Explicit]
      else if List.Tot.length (free_uvars pr_l) = 0
      then
        let arg = set_abduction_variable_term pr_r in
        mk_app (`elim_and_r_squash) [arg, Q_Explicit]
      else
        // postpone the goal instead of failing hard, to allow for other goals to solve those uvars
        raise (Postpone "set_abduction_variable_term: there are still uvars on both sides of l_and")
    | _ -> fail "set_abduction_variable: ill-formed /\\"
  else
    match inspect hd with
    | Tv_Uvar _ _ ->
      mk_app (`_return_squash) [`(), Q_Explicit]
    | _ -> fail "set_abduction_variable: cannot unify"

/// Solves an arrow goal [pr -> goal] by abduction: instantiates it with the
/// term built by [set_abduction_variable_term] from the domain [pr].
let set_abduction_variable () : Tac unit =
  let g = cur_goal () in
  match inspect_unascribe g with
  | Tv_Arrow b _ ->
    let pr = b.sort in
    exact (set_abduction_variable_term pr)
  | _ -> fail "Not an arrow goal"

let canon_l_r (use_smt:bool)
              (carrier_t:term)  //e.g. vprop
              (eq:term) (m:term)
              (pr pr_bind:term)
              (lhs rel rhs:term) : Tac unit =
  let m_unit = norm_term [iota; zeta; delta] (`(CE.CM?.unit (`#m))) in
  let m_mult = norm_term [iota; zeta; delta] (`(CE.CM?.mult (`#m))) in
  let am = const m_unit in (* empty map *)
  let (r1_raw, ts, am) = reification eq m [] am lhs in
  let (r2_raw,  _, am) = reification eq m ts am rhs in

  // Encapsulating this in a try/with to avoid spawning uvars for smt_fallback
  let l1_raw, l2_raw, emp_frame, uvar_terms =
    try
      let res = equivalent_lists use_smt (flatten r1_raw) (flatten r2_raw) am in
      raise (Result res)
    with
    | TacticFailure m -> fail m
    | Result res -> res
    | _ -> fail "uncaught exception in equivalent_lists"
  in

  //So now we have:
  // am     : amap mapping atoms to terms in lhs and rhs
  // r1_raw : an expression in the atoms language for lhs
  // r2_raw : an expression in the atoms language for rhs
  // l1_raw : sorted list of atoms in lhs
  // l2_raw : sorted list of atoms in rhs
  //
  //In particular, r1_raw and r2_raw capture lhs and rhs structurally
  //  (i.e. same associativity, emp, etc.)
  //
  //Whereas l1_raw and l2_raw are "canonical" representations of lhs and rhs
  //  (via xsdenote)

  //Build an amap where atoms are mapped to names
  //The type of these names is carrier_t passed by the caller

  let am_bv : list (atom & namedv & typ) =
    mapi
      (fun i (a, _) ->
       let x = fresh_namedv_named ("x" ^ (string_of_int i)) in
       (a, x, carrier_t))
      (fst am) in
  let am_bv_term : amap term =
    map (fun (a, bv, _sort) -> a, pack (Tv_Var bv)) am_bv, snd am in

  let mdenote_tm (e:exp) : term =
    mdenote_gen
      m_unit
      (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)])
      am_bv_term
      e in
  let xsdenote_tm (l:list atom) : term =
    xsdenote_gen
      m_unit
      (fun t1 t2 -> mk_app m_mult [(t1, Q_Explicit); (t2, Q_Explicit)])
      am_bv_term
      l in

  //Get the named representations of lhs, rhs, and their respective sorted versions
  let lhs_named        = mdenote_tm r1_raw in
  let rhs_named        = mdenote_tm r2_raw in
  let sorted_lhs_named = xsdenote_tm l1_raw in
  let sorted_rhs_named = xsdenote_tm l2_raw in

  //We now build an auxiliary goal of the form:
  //
  // forall xs. (sorted_lhs_named `rel` sorted_rhs_named) ==> (lhs_named `rel` rhs_named)
  //
  // where xs are the fresh names that we introduced earlier

  let mk_rel (l r:term) : term =
    mk_app rel [(l, Q_Explicit); (r, Q_Explicit)] in
  let imp_rhs = mk_rel lhs_named rhs_named in
  let imp_lhs = mk_rel sorted_lhs_named sorted_rhs_named in
  let imp =
    mk_app (pack (Tv_FVar (pack_fv imp_qn))) [(imp_lhs, Q_Explicit); (imp_rhs, Q_Explicit)] in

  //fold over names and quantify over them
  let aux_goal =
    fold_right
      (fun (_, nv, sort) t ->
       let nvv = inspect_namedv nv in
       let b = {
         ppname = nvv.ppname;
         uniq   = nvv.uniq;
         qual   = Q_Explicit;
         attrs  = [];
         sort   = sort;
       } in
       let _, t = close_term b t in
       let t = pack (Tv_Abs b t) in
       mk_app (pack (Tv_FVar (pack_fv forall_qn))) [t, Q_Explicit])
      am_bv
      imp in

  //Introduce a cut with the auxiliary goal
  apply_lemma (`cut (`#aux_goal));

  //After the cut, the goal looks like: A /\ (A ==> G)
  // where A is the auxiliary goal and G is the original goal (lhs `rel` rhs)
  split ();

  //Solving A:
  focus (fun _ ->
    //The proof follows a similar structure as before naming was introduced
    //
    //Except that this time, the amap is in terms of names,
    //  and hence its typechecking is faster and (hopefully) no SMT involved

    //Open the forall binders in A, and use the fresh names to build an amap
    let am =
      fold_left (fun am (a, _, _sort) ->
        let b = forall_intro () in
        let v = binding_to_namedv b in
        (a, pack (Tv_Var v))::am) [] am_bv,
      snd am in

    //Introduce the lhs of implication
    let b = implies_intro () in

    //Now the proof is the plain old canon proof
    let am = convert_am am in
    let r1 = quote_exp r1_raw in
    let r2 = quote_exp r2_raw in
    change_sq (`(normal_tac (mdenote (`#eq) (`#m) (`#am) (`#r1)
                   `CE.EQ?.eq (`#eq)`
                 mdenote (`#eq) (`#m) (`#am) (`#r2))));
    apply_lemma (`normal_elim);

    apply (`monoid_reflect );

    let l1 = quote_atoms l1_raw in
    let l2 = quote_atoms l2_raw in
    apply_lemma (`equivalent_sorted (`#eq) (`#m) (`#am) (`#l1) (`#l2));

    if List.Tot.length (goals ()) = 0 then ()
    else begin
      norm [primops;
iota; zeta;
            delta_only [`%xsdenote; `%select;
              `%my_assoc; `%my_append; `%flatten; `%sort;
              `%my_sortWith; `%my_partition;
              `%my_bool_of_compare; `%my_compare_of_bool;
              `%fst; `%__proj__Mktuple2__item___1;
              `%snd; `%__proj__Mktuple2__item___2;
              `%CE.__proj__CM__item__unit;
              `%CE.__proj__CM__item__mult;
              `%rm;
              `%CE.__proj__EQ__item__eq;
              `%req;
              `%star;]
      ];

      //The goal is of the form G1 /\ G2 /\ G3, as in the requires of equivalent_sorted
      split ();
      split ();

      //Solve G1 and G2 by trefl
      trefl ();
      trefl ();

      //G3 is the lhs of the implication in the auxiliary goal
      //  that we have in our assumptions via b
      apply (`FStar.Squash.return_squash);
      exact (binding_to_term b)
    end);

  dismiss_slprops();

  //Our goal now is A ==> G (where G is the original goal (lhs `rel` rhs))

  //Open the forall binders
  ignore (repeatn (List.Tot.length am_bv) (fun _ -> apply_lemma (`inst_bv)));

  //And apply modus ponens
  apply_lemma (`modus_ponens);

  //Now our goal is sorted_lhs_named `rel` sorted_rhs_named
  // where the names are replaced with fresh uvars (from the repeatn call above)

  //So we just trefl
  match uvar_terms with
  | [] -> // Closing unneeded prop uvar
    focus (fun _ ->
      try
        apply_lemma (`and_true);
        split ();
        if emp_frame then apply_lemma (`identity_left (`#eq) (`#m))
        else apply_lemma (`(CE.EQ?.reflexivity (`#eq)));
        unify_pr_with_true pr; // MUST be done AFTER identity_left/reflexivity, which can unify other uvars
        apply_lemma (`solve_implies_true)
      with
      | TacticFailure msg -> fail ("Cannot unify pr with true: " ^ msg)
      | e -> raise e
    )
  | l ->
    if emp_frame then (
      apply_lemma (`identity_left_smt (`#eq) (`#m))
    ) else (
      apply_lemma (`smt_reflexivity (`#eq))
    );
    t_trefl true;
    close_equality_typ (cur_goal());
    revert ();
    set_abduction_variable ()

/// Wrapper around the tactic above
/// The constraint should be of the shape `squash (equiv lhs rhs)`
let canon_monoid (use_smt:bool) (carrier_t:term) (eq m:term) (pr pr_bind:term) : Tac unit =
  norm [iota; zeta];
  let t = cur_goal () in
  // removing top-level squash application
let sq, rel_xy = collect_app_ref t in
  // unpacking the application of the equivalence relation (lhs `EQ?.eq eq` rhs)
  (match rel_xy with
   | [(rel_xy,_)] -> (
       let open FStar.List.Tot.Base in
       let rel, xy = collect_app_ref rel_xy in
       if (length xy >= 2)
       then (
         // The relation may carry implicit arguments: the last two explicit
         // ones are lhs and rhs
         match index xy (length xy - 2) , index xy (length xy - 1) with
         | (lhs, Q_Explicit) , (rhs, Q_Explicit) ->
           canon_l_r use_smt carrier_t eq m pr pr_bind lhs rel rhs
         | _ -> fail "Goal should have been an application of a binary relation to 2 explicit arguments"
       )
       else (
         fail "Goal should have been an application of a binary relation to n implicit and 2 explicit arguments"
       )
     )
   | _ -> fail "Goal should be squash applied to a binary relation")

/// Instantiation of the generic AC-unification tactic with the vprop commutative monoid
let canon' (use_smt:bool) (pr:term) (pr_bind:term) : Tac unit =
  canon_monoid use_smt (pack (Tv_FVar (pack_fv [`%vprop]))) (`req) (`rm) pr pr_bind

/// Counts the number of unification variables corresponding to vprops in the term [t]
let rec slterm_nbr_uvars (t:term) : Tac int =
  match inspect_unascribe t with
  | Tv_Uvar _ _ -> 1
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    if is_star_or_unit hd then
      // Only count the number of unresolved slprops, not program implicits
      slterm_nbr_uvars_argv args
    else if is_uvar hd then 1
    else 0
  | Tv_Abs _ t -> slterm_nbr_uvars t
  | _ -> 0

and slterm_nbr_uvars_argv (args: list argv) : Tac int =
  fold_left (fun n (x, _) -> n + slterm_nbr_uvars x) 0 args

// Identity marker: wraps a vprop whose uvars must be solved before the
// surrounding constraint may be scheduled (see all_guards_solved)
let guard_vprop (v: vprop) : Tot vprop = v

/// True iff every [guard_vprop]-marked subterm of [t] is free of vprop uvars.
let rec all_guards_solved (t: term) : Tac bool =
  match inspect_unascribe t with
  | Tv_Abs _ t -> all_guards_solved t
  | Tv_App _ _ ->
    let hd, args = collect_app t in
    if hd `is_fvar` (`%guard_vprop)
    then slterm_nbr_uvars_argv args = 0
    else if not (all_guards_solved hd)
    then false
    else
      // Fold the check over all arguments, thunked to stay in the Tac effect
      List.Tot.fold_left
        (fun (tac: (unit -> Tac bool)) (tm, _) ->
          let f () : Tac bool =
            if all_guards_solved tm
            then tac ()
            else false
          in
          f
        )
        (let f () : Tac bool = true in f)
        args
        ()
  | _ -> true

/// If all guards in the current goal are solved, unfolds [guard_vprop]
/// markers and returns true; otherwise leaves the goal alone and returns false.
let unfold_guard () : Tac bool =
  if all_guards_solved (cur_goal ())
  then begin
    focus (fun _ -> norm [delta_only [(`%guard_vprop)]]);
    true
  end else
    false

/// True iff the head of [t] is exactly the uvar numbered [i].
let rec term_is_uvar (t: term) (i: int) : Tac bool = match inspect t with
  | Tv_Uvar i' _ -> i = i'
  | Tv_App _ _ ->
      let hd, args = collect_app t in
      term_is_uvar hd i
  | _ -> false

val solve_can_be_split_for : string -> Tot unit

val solve_can_be_split_lookup : unit // FIXME: src/reflection/FStar.Reflection.Basic.lookup_attr only supports fvar attributes, so we cannot directly look up for (solve_can_be_split_for blabla), we need a nullary attribute to use with lookup_attr

/// Dismisses every goal except the last one in [l].
let rec dismiss_all_but_last' (l: list goal) : Tac unit =
  match l with
  | [] | [_] -> set_goals l
  | _ :: q -> dismiss_all_but_last' q

let dismiss_all_but_last () : Tac unit =
  dismiss_all_but_last' (goals ())

/// Keeps only the squash goals of [goals] (accumulated in [keep], in order).
let rec dismiss_non_squash_goals' (keep:list goal) (goals:list goal) : Tac unit =
  match goals with
  | [] -> set_goals (List.Tot.rev keep)
  | hd :: tl ->
    let f = term_as_formula' (goal_type hd) in
    match f with
    | App hs _ ->
      if is_squash hs
      then dismiss_non_squash_goals' (hd::keep) tl
      else dismiss_non_squash_goals' keep tl
    | _ ->
      dismiss_non_squash_goals' keep tl

let dismiss_non_squash_goals () =
  let g = goals () in
  dismiss_non_squash_goals' [] g

/// Membership test on terms, using term_eq_old.
let rec term_mem (te: term) (l: list term) : Tac bool =
  match l with
  | [] -> false
  | t' :: q ->
    if te `term_eq_old` t' then true else term_mem te q

/// Filters [l] down to the fvars whose sigelt carries attribute [attr]
/// (accumulator [found] is kept in reverse, restored on return).
let rec lookup_by_term_attr' (attr: term) (e: env) (found: list fv) (l: list fv) : Tac (list fv) =
  match l with
  | [] -> List.Tot.rev found
  | f :: q ->
    let n = inspect_fv f in
    begin match lookup_typ e n with
    | None -> lookup_by_term_attr' attr e found q
    | Some se ->
      let found' =
        if attr `term_mem` sigelt_attrs se
        then f :: found
        else found
      in
      lookup_by_term_attr' attr e found' q
    end

/// Looks up, in the current environment, all fvars carrying the nullary
/// [label_attr] that also carry the term-level [attr].
let lookup_by_term_attr (label_attr: term) (attr: term) : Tac (list fv) =
  let e = cur_env () in
  let candidates = lookup_attr label_attr e in
  lookup_by_term_attr' attr e [] candidates

let rec
bring_last_goal_on_top' (others: list goal) (goals: list goal) : Tac unit =
  match goals with
  | [] -> set_goals (List.Tot.rev others)
  | last :: [] -> set_goals (last :: List.Tot.rev others)
  | a :: q -> bring_last_goal_on_top' (a :: others) q

/// Moves the last goal to the front, keeping the other goals in order.
let bring_last_goal_on_top () =
  let g = goals () in
  bring_last_goal_on_top' [] g

/// Searches [t] (a star-structured vprop term) for a subterm whose head
/// symbol has a registered solver (via [label_attr]/[attr]); returns a
/// continuation that applies the congruence lemmas to focus on that subterm,
/// or None if no candidate is found.
let rec extract_contexts
  (lemma_left lemma_right label_attr attr: term)
  (t: term)
: Tac (option (unit -> Tac unit))
= let hd, tl = collect_app t in
  if is_star hd
  then
    match tl with
    | (t_left, Q_Explicit) :: (t_right, Q_Explicit) :: [] ->
      let extract_right () : Tac (option (unit -> Tac unit)) =
        match extract_contexts lemma_left lemma_right label_attr attr t_right with
        | None -> None
        | Some f ->
          Some (fun _ ->
            apply_lemma lemma_right;
            dismiss_all_but_last ();
            f ()
          )
      in
      begin match extract_contexts lemma_left lemma_right label_attr attr t_left with
      | None -> extract_right ()
      | Some f ->
        Some (fun _ ->
          try
            apply_lemma lemma_left;
            dismiss_all_but_last ();
            f ()
          with _ ->
            begin match extract_right () with
            | None -> fail "no context on the right either"
            | Some g -> g ()
            end
        )
      end
    | _ -> None
  else
    let candidates =
      let hd_fv =
        match inspect_unascribe hd with
        | Tv_FVar fv -> Some fv
        | Tv_UInst fv _ -> Some fv
        | _ -> None
      in
      match hd_fv with
      | None -> []
      | Some hd_fv ->
        let hd_s' = implode_qn (inspect_fv hd_fv) in
        let hd_s = pack (Tv_Const (C_String hd_s')) in
        lookup_by_term_attr label_attr (mk_app attr [hd_s, Q_Explicit])
    in
    if Nil? candidates
    then None
    else
      Some (fun _ ->
        first
          (List.Tot.map
            (fun candidate _ -> apply_lemma (pack (Tv_FVar candidate)) <: Tac unit)
            candidates);
        dismiss_non_squash_goals ()
      )

let extract_cbs_contexts = extract_contexts
  (`can_be_split_congr_l)
  (`can_be_split_congr_r)
  (`solve_can_be_split_lookup)
  (`solve_can_be_split_for)

/// Opens existentials hidden in the rhs of a can_be_split goal by locating a
/// context with a registered solver lemma and applying it through transitivity.
let open_existentials () : Tac unit
=
  let e = cur_env () in
  if Nil? (lookup_attr (`solve_can_be_split_lookup) e)
  then fail "Tactic disabled: no available lemmas in context";
  norm [delta_attr [`%__reduce__]];
  let t0 = cur_goal () in
  match collect_app t0 with
  | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] ->
    let hd, tl = collect_app t1 in
    if hd `is_fvar` (`%can_be_split)
    then
      match tl with
      | _ (* lhs *) :: (rhs, Q_Explicit) :: [] ->
        begin match extract_cbs_contexts rhs with
        | None -> fail "open_existentials: no context found"
        | Some f ->
          apply_lemma (`can_be_split_trans_rev);
          dismiss_all_but_last ();
          split ();
          focus f;
          bring_last_goal_on_top () // so that any preconditions for the selected lemma are scheduled for later
        end
      | _ -> fail "open_existentials: ill-formed can_be_split"
    else
      fail "open_existentials: not a can_be_split goal"
  | _ -> fail "open_existentials: not a squash goal"

/// Best-effort wrapper around [open_existentials]; reports success as a bool.
let try_open_existentials () : Tac bool =
  focus (fun _ ->
    try
      open_existentials ();
      true
    with _ -> false
  )

(* Solving the can_be_split* constraints, if they are ready to be scheduled
   A constraint is deemed ready to be scheduled if it contains only one vprop unification variable
   If so, constraints are stripped to their underlying definition based on vprop equivalence,
   introducing universally quantified variables when needed.
Internal details of the encoding are removed through normalization,
   before calling the AC-unification tactic defined above
*)

/// Solves a `can_be_split` constraint
let rec solve_can_be_split (args:list argv) : Tac bool =
  match args with
  | [(t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        // Schedule only when at most one side still has a vprop uvar
        if lnbr + rnbr <= 1
        then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        try
          focus (fun _ ->
            apply_lemma (`equiv_can_be_split);
            dismiss_slprops();
            // If we have exactly the same term on both side,
            // equiv_sl_implies would solve the goal immediately
            or_else (fun _ -> apply_lemma (`equiv_refl))
              (fun _ ->
                if rnbr = 0 then apply_lemma (`equiv_sym);

                norm [delta_only [
                       `%__proj__CM__item__unit;
                       `%__proj__CM__item__mult;
                       `%rm;
                       `%__proj__Mktuple2__item___1; `%__proj__Mktuple2__item___2;
                       `%fst; `%snd];
                     delta_attr [`%__reduce__];
                     primops; iota; zeta];
                canon' false (`true_p) (`true_p)));
          true
        with
        | _ ->
          let opened_some = try_open_existentials () in
          if opened_some
          then solve_can_be_split args // we only need args for their number of uvars, which has not changed
          else false
      ) else false
  | _ -> false // Ill-formed can_be_split, should not happen

/// Solves a can_be_split_dep constraint
let solve_can_be_split_dep (args:list argv) : Tac bool =
  match args with
  | [(p, _); (t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        if lnbr + rnbr <= 1
        then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        focus (fun _ ->
          let p_bind = implies_intro () in
          apply_lemma (`equiv_can_be_split);
          dismiss_slprops ();
          or_else
            (fun _ ->
              // Trivial case: unify the SMT prop with True and close by reflexivity
              let b = unify p (`true_p) in
              if not b then fail "could not unify SMT prop with True";
              apply_lemma (`equiv_refl))
            (fun _ ->
              if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
              or_else (fun _ ->  flip()) (fun _ -> ());
              norm [delta_only [
                     `%__proj__CM__item__unit;
                     `%__proj__CM__item__mult;
                     `%rm;
                     `%__proj__Mktuple2__item___1;
                     `%__proj__Mktuple2__item___2;
                     `%fst; `%snd];
                   delta_attr [`%__reduce__];
                   primops; iota; zeta];
              canon' true p (binding_to_term p_bind)));

        true
      ) else false

  | _ -> fail "ill-formed can_be_split_dep"

/// Helper rewriting lemma
val emp_unit_variant (p:vprop) : Lemma
   (ensures can_be_split p (p `star` emp))

/// Solves a can_be_split_forall constraint
let solve_can_be_split_forall (args:list argv) : Tac bool =
  match args with
  | [_; (t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        if lnbr + rnbr <= 1
        then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        focus (fun _ ->
          ignore (forall_intro());
          apply_lemma (`equiv_can_be_split);
          dismiss_slprops();
          or_else (fun _ -> apply_lemma (`equiv_refl))
            (fun _ ->
              if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
              or_else (fun _ ->  flip()) (fun _ -> ());
              norm [delta_only [
                     `%__proj__CM__item__unit;
                     `%__proj__CM__item__mult;
                     `%rm;
                     `%__proj__Mktuple2__item___1;
                     `%__proj__Mktuple2__item___2;
                     `%fst; `%snd];
                   delta_attr [`%__reduce__];
                   primops; iota; zeta];
              canon' false (`true_p) (`true_p)));
        true
      ) else false

  | _ -> fail "Ill-formed can_be_split_forall, should not happen"

val solve_can_be_split_forall_dep_for : string -> Tot unit

val solve_can_be_split_forall_dep_lookup : unit // FIXME: same as solve_can_be_split_for above

let extract_cbs_forall_dep_contexts = extract_contexts
  (`can_be_split_forall_dep_congr_l)
  (`can_be_split_forall_dep_congr_r)
  (`solve_can_be_split_forall_dep_lookup)
  (`solve_can_be_split_forall_dep_for)

/// Counterpart of [open_existentials] for can_be_split_forall_dep goals.
let open_existentials_forall_dep () : Tac unit
=
  let e = cur_env () in
  if Nil?
(lookup_attr (`solve_can_be_split_forall_dep_lookup) e)
  then fail "Tactic disabled: no available lemmas in context";
  norm [
    delta_only [
      `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit;
      `%FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult;
      `%rm;
    ];
    iota;
    delta_attr [`%__reduce__];
  ];
  let t0 = cur_goal () in
  match collect_app t0 with
  | _ (* squash/auto_squash *) , (t1, Q_Explicit) :: [] ->
    let hd, tl = collect_app t1 in
    if hd `is_fvar` (`%can_be_split_forall_dep)
    then
      match tl with
      | _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: []
      | (_, Q_Implicit) (* #a *) :: _ (* cond *) :: _ (* lhs *) :: (rhs, Q_Explicit) :: [] ->
        begin match inspect_unascribe rhs with
        | Tv_Abs _ body ->
          begin match extract_cbs_forall_dep_contexts body with
          | None -> fail "open_existentials_forall_dep: no candidate"
          | Some f ->
            apply_lemma (`can_be_split_forall_dep_trans_rev);
            dismiss_all_but_last ();
            split ();
            focus f;
            bring_last_goal_on_top ();
            if Cons? (goals ()) then norm []
          end
        | _ -> fail "open_existentials_forall_dep : not an abstraction"
        end
      | _ -> fail "open_existentials_forall_dep : wrong number of arguments to can_be_split_forall_dep"
    else
      fail "open_existentials_forall_dep : not a can_be_split_forall_dep goal"
  | _ ->
    fail "open_existentials_forall_dep : not a squash/auto_squash goal"

/// Best-effort wrapper; reports success as a bool.
let try_open_existentials_forall_dep () : Tac bool
= focus (fun _ ->
    try
      open_existentials_forall_dep ();
      true
    with _ -> false
  )

/// Solves a can_be_split_forall_dep constraint
let rec solve_can_be_split_forall_dep (args:list argv) : Tac bool =
  match args with
  | [_; (pr, _); (t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        if lnbr + rnbr <= 1
        then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        try
          focus (fun _ ->
            norm [];
            let x = forall_intro () in
            let pr = mk_app pr [(binding_to_term x, Q_Explicit)] in
            let p_bind = implies_intro () in
            apply_lemma (`equiv_can_be_split);
            or_else (fun _ -> flip()) (fun _ -> ());
            let
pr = norm_term [] pr in
            or_else
              (fun _ ->
                let b = unify pr (`true_p) in
                if not b then fail "could not unify SMT prop with True";
                apply_lemma (`equiv_refl))
              (fun _ ->
                if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
                or_else (fun _ ->  flip()) (fun _ -> ());
                norm [delta_only [
                       `%__proj__CM__item__unit;
                       `%__proj__CM__item__mult;
                       `%rm;
                       `%__proj__Mktuple2__item___1;
                       `%__proj__Mktuple2__item___2;
                       `%fst; `%snd];
                     delta_attr [`%__reduce__];
                     primops; iota; zeta];
                canon' true pr (binding_to_term p_bind)));
          true
        with
        | Postpone msg ->
          // Not a hard failure: other goals may still instantiate the blocking uvars
          false
        | TacticFailure msg ->
          let opened = try_open_existentials_forall_dep () in
          if opened
          then solve_can_be_split_forall_dep args // we only need args for their number of uvars, which has not changed
          else fail msg
        | _ -> fail "Unexpected exception in framing tactic"
      ) else false

  | _ -> fail "Ill-formed can_be_split_forall_dep, should not happen"

/// Solves an equiv_forall constraint
let solve_equiv_forall (args:list argv) : Tac bool =
  match args with
  | [_; (t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        if lnbr + rnbr <= 1
        then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        focus (fun _ ->
          apply_lemma (`equiv_forall_elim);
          match goals () with
          | [] -> ()
          | _ ->
            dismiss_slprops ();
            ignore (forall_intro());
            or_else
              (fun _ -> apply_lemma (`equiv_refl))
              (fun _ ->
                if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
                or_else (fun _ ->  flip()) (fun _ -> ());
                norm [delta_only [
                       `%__proj__CM__item__unit;
                       `%__proj__CM__item__mult;
                       `%rm;
                       `%__proj__Mktuple2__item___1;
                       `%__proj__Mktuple2__item___2;
                       `%fst; `%snd];
                     delta_attr [`%__reduce__];
                     primops; iota; zeta];
                canon' false (`true_p) (`true_p)));
        true
      ) else false

  | _ -> fail "Ill-formed equiv_forall, should not happen"

/// Solves an equiv constraint
let solve_equiv (args:list argv) : Tac bool =
  match args with
  | [(t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        if lnbr + rnbr <= 1
then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        focus (fun _ ->
          or_else
            (fun _ -> apply_lemma (`equiv_refl))
            (fun _ ->
              if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
              or_else (fun _ -> flip ()) (fun _ -> ());
              norm [delta_only [
                     `%__proj__CM__item__unit;
                     `%__proj__CM__item__mult;
                     `%rm;
                     `%__proj__Mktuple2__item___1;
                     `%__proj__Mktuple2__item___2;
                     `%fst; `%snd];
                   delta_attr [`%__reduce__];
                   primops; iota; zeta];
              canon' false (`true_p) (`true_p)));
        true
      ) else false

  | _ -> fail "Ill-formed equiv, should not happen"

/// Solves a can_be_split_post constraint
let solve_can_be_split_post (args:list argv) : Tac bool =
  match args with
  | [_; _; (t1, _); (t2, _)] ->
      let lnbr = slterm_nbr_uvars t1 in
      let rnbr = slterm_nbr_uvars t2 in
      if
        if lnbr + rnbr <= 1
        then unfold_guard ()
        else false
      then (
        let open FStar.Algebra.CommMonoid.Equiv in
        focus (fun _ ->
          norm[];
          let g = _cur_goal () in
          ignore (forall_intro());
          apply_lemma (`equiv_forall_elim);
          match goals () with
          | [] -> ()
          | _ ->
            dismiss_slprops ();
            ignore (forall_intro());
            or_else
              (fun _ -> apply_lemma (`equiv_refl))
              (fun _ ->
                if lnbr <> 0 && rnbr = 0 then apply_lemma (`equiv_sym);
                or_else (fun _ ->  flip()) (fun _ -> ());
                norm [delta_only [
                       `%__proj__CM__item__unit;
                       `%__proj__CM__item__mult;
                       `%rm;
                       `%__proj__Mktuple2__item___1;
                       `%__proj__Mktuple2__item___2;
                       `%fst; `%snd];
                     delta_attr [`%__reduce__];
                     primops; iota; zeta];
                canon' false (`true_p) (`true_p)));
        true
      ) else false

  | _ -> fail "ill-formed can_be_split_post"

/// Checks whether any of the two terms was introduced during a Steel monadic return
let is_return_eq (l r:term) : Tac bool =
  let nl, al = collect_app l in
  let nr, ar = collect_app r in
  is_fvar nl (`%return_pre) || is_fvar nr (`%return_pre)

/// Solves indirection equalities introduced by the layered effects framework.
/// If these equalities were introduced during a monadic return, they need to be solved /// at a later stage to avoid overly restricting contexts of unification variables let rec solve_indirection_eqs (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | hd::_ -> let f = term_as_formula' (goal_type hd) in match f with | Comp (Eq _) l r -> if is_return_eq l r then later() else trefl(); solve_indirection_eqs (fuel - 1) | _ -> later(); solve_indirection_eqs (fuel - 1) /// Solve all equalities in the list of goals by calling the F* unifier let rec solve_all_eqs (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | hd::_ -> let f = term_as_formula' (goal_type hd) in match f with | Comp (Eq _) l r -> trefl(); solve_all_eqs (fuel - 1) | _ -> later(); solve_all_eqs (fuel - 1) /// It is important to not normalize the return_pre eqs goals before unifying /// See test7 in FramingTestSuite for a detailed explanation let rec solve_return_eqs (fuel: nat) : Tac unit = if fuel = 0 then () else match goals () with | [] -> () | hd::_ -> let f = term_as_formula' (goal_type hd) in match f with | Comp (Eq _) l r -> trefl(); solve_return_eqs (fuel - 1) | _ -> later(); solve_return_eqs (fuel - 1) /// Strip annotations in a goal, to get to the underlying slprop equivalence let goal_to_equiv (loc:string) : Tac unit = let t = cur_goal () in let f = term_as_formula' t in match f with | App hd0 t -> if not (is_fvar hd0 (`%squash)) then fail (loc ^ " unexpected non-squash goal in goal_to_equiv"); let hd, args = collect_app t in if hd `is_fvar` (`%can_be_split) then ( apply_lemma (`equiv_can_be_split) ) else if hd `is_fvar` (`%can_be_split_forall) then ( ignore (forall_intro ()); apply_lemma (`equiv_can_be_split) ) else if hd `is_fvar` (`%equiv_forall) then ( apply_lemma (`equiv_forall_elim); ignore (forall_intro ()) ) else if hd `is_fvar` (`%can_be_split_post) then ( apply_lemma (`can_be_split_post_elim); dismiss_slprops(); ignore 
(forall_intro ()); ignore (forall_intro ()) ) else if hd `is_fvar` (`%can_be_split_dep) then ( fail ("can_be_split_dep not supported in " ^ loc) ) else if hd `is_fvar` (`%can_be_split_forall_dep) then ( fail ("can_be_split_forall_dep not supported in " ^ loc) ) else // This should never happen fail (loc ^ " goal in unexpected position") | _ -> fail (loc ^ " unexpected goal") let rec term_dict_assoc (#a: Type) (key: term) (l: list (term & a)) : Tac (list a) = match l with | [] -> [] | (k, v) :: q -> let q' = term_dict_assoc key q in if k `term_eq_old` key then (v :: q') else q' /// Returns true if the goal has been solved, false if it should be delayed let solve_or_delay (dict: list (term & (unit -> Tac bool))) : Tac bool = // Beta-reduce the goal first if possible norm []; let f = term_as_formula' (cur_goal ()) in match f with | App hd0 t -> if is_fvar hd0 (`%squash) then let hd, args = collect_app t in if hd `is_fvar` (`%can_be_split) then solve_can_be_split args else if hd `is_fvar` (`%can_be_split_forall) then solve_can_be_split_forall args else if hd `is_fvar` (`%equiv_forall) then solve_equiv_forall args else if hd `is_fvar` (`%can_be_split_post) then solve_can_be_split_post args else if hd `is_fvar` (`%equiv) then solve_equiv args else if hd `is_fvar` (`%can_be_split_dep) then solve_can_be_split_dep args else if hd `is_fvar` (`%can_be_split_forall_dep) then solve_can_be_split_forall_dep args else let candidates = term_dict_assoc hd dict in let run_tac (tac: unit -> Tac bool) () : Tac bool = focus tac in begin try first (List.Tot.map run_tac candidates) with _ -> (* this is a logical goal, solve it only if it has no uvars *) if List.Tot.length (free_uvars t) = 0 then (smt (); true) else false end else // TODO: handle non-squash goals here false | Comp (Eq _) l r -> let lnbr = List.Tot.length (free_uvars l) in let rnbr = List.Tot.length (free_uvars r) in // Only solve equality if one of the terms is completely determined if lnbr = 0 || rnbr = 0 then (trefl (); 
true) else false | _ -> false /// Returns true if it successfully solved a goal /// If it returns false, it means it didn't find any solvable goal, /// which should mean only delayed goals are left let rec vprop_term_uvars (t:term) : Tac (list int) = match inspect_unascribe t with | Tv_Uvar i' _ -> [i'] | Tv_App _ _ -> let hd, args = collect_app t in if is_star_or_unit hd then // Only count the number of unresolved slprops, not program implicits argv_uvars args else vprop_term_uvars hd | Tv_Abs _ t -> vprop_term_uvars t | _ -> [] and argv_uvars (args: list argv) : Tac (list int) = let t : unit -> Tac (list int) = fold_left (fun (n: unit -> Tac (list int)) (x, _) -> let t () : Tac (list int) = let l1 = n () in let l2 = vprop_term_uvars x in l1 `List.Tot.append` l2 in t ) (fun _ -> []) args in t () let rec remove_dups_from_sorted (#t: eqtype) (l: list t) : Tot (list t) = match l with | [] | [_] -> l | a1 :: a2 :: q -> if a1 = a2 then remove_dups_from_sorted (a2 :: q) else a1 :: remove_dups_from_sorted (a2 :: q) let simplify_list (l: list int) : Tot (list int) = remove_dups_from_sorted (List.Tot.sortWith (List.Tot.compare_of_bool (<)) l) let goal_term_uvars (t: term) : Tac (list int) = let hd, tl = collect_app t in if hd `is_fvar` (`%squash) then match tl with | [tl0, Q_Explicit] -> let _, tl1 = collect_app tl0 in simplify_list (argv_uvars tl1) | _ -> dump "ill-formed squash"; [] else [] let rec merge_sorted (l1 l2: list int) : Tot (list int) (decreases (List.Tot.length l1 + List.Tot.length l2)) = match l1 with | [] -> l2 | a1 :: q1 -> begin match l2 with | [] -> l1 | a2 :: q2 -> if a1 < a2 then a1 :: merge_sorted q1 l2 else if a2 < a1 then a2 :: merge_sorted l1 q2 else a1 :: merge_sorted q1 q2 end let rec sorted_lists_intersect (l1 l2: list int) : Tot bool (decreases (List.Tot.length l1 + List.Tot.length l2)) = match l1 with | [] -> false | a1 :: q1 -> begin match l2 with | [] -> false | a2 :: q2 -> if a1 = a2 then true else if a1 < a2 then sorted_lists_intersect q1 
l2 else sorted_lists_intersect l1 q2 end /// TODO: cache the list of variables for each goal, to avoid computing them several times /// Compute the list of all vprop uvars that appear in the same goal as unsolved guard_vprop let rec compute_guarded_uvars1 (accu: list int) (g: list goal) : Tac (list int) = match g with | [] -> accu | a :: q -> let t = goal_type a in let accu' = if all_guards_solved t then accu else merge_sorted accu (goal_term_uvars t) in compute_guarded_uvars1 accu' q /// Enrich the list of vprop uvars with those that appear in the same goal let rec compute_guarded_uvars2 (accu: list int) (g: list goal) : Tac (list int) = match g with | [] -> accu | a :: q -> let t = goal_type a in let l = goal_term_uvars t in let accu' = if sorted_lists_intersect accu l then merge_sorted accu l else accu in compute_guarded_uvars2 accu' q let rec compute_guarded_uvars3 (accu: list int) (g: list goal) : Tac (list int) = let accu' = compute_guarded_uvars2 accu g in if accu = accu' then accu else compute_guarded_uvars3 accu' g let compute_guarded_uvars () : Tac (list int) = let g = goals () in let accu = compute_guarded_uvars1 [] g in compute_guarded_uvars3 accu g let rec pick_next (guarded_uvars: list int) (dict: _) (fuel: nat) : Tac bool = if fuel = 0 then false else match goals () with | [] -> true | a::_ -> let t = goal_type a in let l = goal_term_uvars t in let next () : Tac bool = later (); pick_next guarded_uvars dict (fuel - 1) in if sorted_lists_intersect guarded_uvars l then next () else if solve_or_delay dict then true else next () /// Main loop to schedule solving of goals. /// The goals () function fetches all current goals in the context let rec resolve_tac (dict: _) : Tac unit = match goals () with | [] -> () | g -> norm []; let guarded_uvars = compute_guarded_uvars () in // TODO: If it picks a goal it cannot solve yet, try all the other ones? 
if pick_next guarded_uvars dict (List.Tot.length g) then resolve_tac dict else fail "Could not make progress, no solvable goal found" let rec pick_next_logical (dict: _) (fuel: nat) : Tac bool = if fuel = 0 then false else match goals () with | [] -> true | _::_ -> if solve_or_delay dict then true else (later (); pick_next_logical dict (fuel - 1)) /// Special case for logical requires/ensures goals, which correspond only to equalities let rec resolve_tac_logical (dict: _) : Tac unit = match goals () with | [] -> () | g -> let fuel = List.Tot.length g in if pick_next_logical dict fuel then resolve_tac_logical dict else // This is only for requires/ensures constraints, which are equalities // There should always be a scheduling of constraints, but it can happen // that some uvar for the type of an equality is not resolved. // If we reach this point, we try to simply call the unifier instead of failing directly solve_all_eqs fuel /// Determining whether the type represented by term [t] corresponds to one of the logical (requires/ensures) goals let typ_contains_req_ens (t:term) : Tac bool = let name, _ = collect_app t in is_any_fvar name [`%req_t; `%ens_t; `%pure_wp; `%pure_pre; `%pure_post] /// Splits goals between separation logic goals (slgoals) and requires/ensures goals (loggoals) let rec filter_goals (l:list goal) : Tac (list goal * list goal) = match l with | [] -> [], [] | hd::tl -> let slgoals, loggoals = filter_goals tl in match term_as_formula' (goal_type hd) with | Comp (Eq t) _ _ -> if Some? 
t then let b = typ_contains_req_ens (Some?.v t) in if b then ( slgoals, hd::loggoals ) else ( hd::slgoals, loggoals ) else ( hd::slgoals, loggoals ) | App t _ -> if is_fvar t (`%squash) then hd::slgoals, loggoals else slgoals, loggoals | _ -> slgoals, loggoals let is_true (t:term) () : Tac unit = match term_as_formula t with | True_ -> exact (`()) | _ -> raise Goal_not_trivial /// Solve the maybe_emp goals: /// Normalize to unfold maybe_emp(_dep) and the reduce the if/then/else, and /// solve the goal (either an equality through trefl, or True through trivial)
false
false
Steel.Effect.Common.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val solve_maybe_emps (fuel: nat) : Tac unit
[ "recursion" ]
Steel.Effect.Common.solve_maybe_emps
{ "file_name": "lib/steel/Steel.Effect.Common.fsti", "git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
fuel: Prims.nat -> FStar.Tactics.Effect.Tac Prims.unit
{ "end_col": 31, "end_line": 3054, "start_col": 2, "start_line": 3031 }
FStar.Pervasives.Lemma
val nat_from_bytes_le_eq_lemma: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_bytes_le b == nat_from_bytes_le (update_sub tmp 0 len b))
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let nat_from_bytes_le_eq_lemma len b = nat_from_bytes_le_eq_lemma_ len b
val nat_from_bytes_le_eq_lemma: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_bytes_le b == nat_from_bytes_le (update_sub tmp 0 len b)) let nat_from_bytes_le_eq_lemma len b =
false
null
true
nat_from_bytes_le_eq_lemma_ len b
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.IntTypes.size_nat", "Prims.b2t", "Prims.op_LessThan", "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "Hacl.Impl.Poly1305.Lemmas.nat_from_bytes_le_eq_lemma_", "Prims.unit" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0]) let uints_from_bytes_le_lemma64_2 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16) 
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2]) let uints_from_bytes_le_lemma64_4 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 0 32)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 32 32)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16); uint_from_bytes_le_lemma (sub b 32 16); uint_from_bytes_le_lemma (sub b 48 16) val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo)) let uints64_to_bytes_le_lemma lo hi = let lp = nat_to_bytes_le #SEC 16 (v hi * pow2 64 + v lo) in let rp = concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) in assert (nat_from_bytes_le lp == v hi * pow2 64 + v lo); Seq.append_slices (uint_to_bytes_le lo) (uint_to_bytes_le hi); nat_from_intseq_le_slice_lemma #U8 #SEC #16 rp 8; assert (nat_from_bytes_le rp == nat_from_bytes_le (Seq.slice rp 0 8) + pow2 (8 * 8) * nat_from_bytes_le (Seq.slice rp 8 16)); assert (nat_from_bytes_le rp == nat_from_bytes_le (uint_to_bytes_le lo) + pow2 64 * nat_from_bytes_le (uint_to_bytes_le hi)); lemma_uint_to_bytes_le_preserves_value lo; lemma_uint_to_bytes_le_preserves_value hi; nat_from_intseq_le_inj lp rp val lemma_nat_from_bytes_le_zeroes: len:size_nat -> b:lseq uint8 len -> Lemma (requires (forall (i:nat). 
i < len ==> b.[i] == u8 0)) (ensures nat_from_intseq_le b == 0) let rec lemma_nat_from_bytes_le_zeroes len b = if len = 0 then () else begin nat_from_intseq_le_slice_lemma #U8 #SEC #len b 1; nat_from_intseq_le_lemma0 (Seq.slice b 0 1); lemma_nat_from_bytes_le_zeroes (len-1) (Seq.slice b 1 len) end val nat_from_bytes_le_eq_lemma_: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_intseq_le b == nat_from_intseq_le (update_sub tmp 0 len b)) let nat_from_bytes_le_eq_lemma_ len b = let tmp = create 16 (u8 0) in let r = update_sub tmp 0 len b in assert (Seq.slice r 0 len == b); assert (forall (i:nat). len <= i /\ i < 16 ==> r.[i] == u8 0); assert (forall (i:nat). i < 16 - len ==> Seq.index (Seq.slice r len 16) i == u8 0); nat_from_intseq_le_slice_lemma #U8 #SEC #16 r len; assert (nat_from_intseq_le r == nat_from_intseq_le (Seq.slice r 0 len) + pow2 (len * 8) * nat_from_intseq_le (Seq.slice r len 16)); assert (nat_from_intseq_le r == nat_from_intseq_le b + pow2 (len * 8) * nat_from_intseq_le (Seq.slice r len 16)); lemma_nat_from_bytes_le_zeroes (16 - len) (Seq.slice r len 16) val nat_from_bytes_le_eq_lemma: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_bytes_le b == nat_from_bytes_le (update_sub tmp 0 len b))
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val nat_from_bytes_le_eq_lemma: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_bytes_le b == nat_from_bytes_le (update_sub tmp 0 len b))
[]
Hacl.Impl.Poly1305.Lemmas.nat_from_bytes_le_eq_lemma
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
len: Lib.IntTypes.size_nat{len < 16} -> b: Lib.Sequence.lseq Lib.IntTypes.uint8 len -> FStar.Pervasives.Lemma (ensures (let tmp = Lib.Sequence.create 16 (Lib.IntTypes.u8 0) in Lib.ByteSequence.nat_from_bytes_le b == Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.update_sub tmp 0 len b)))
{ "end_col": 72, "end_line": 125, "start_col": 39, "start_line": 125 }
FStar.Pervasives.Lemma
val lemma_nat_from_bytes_le_zeroes: len:size_nat -> b:lseq uint8 len -> Lemma (requires (forall (i:nat). i < len ==> b.[i] == u8 0)) (ensures nat_from_intseq_le b == 0)
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let rec lemma_nat_from_bytes_le_zeroes len b = if len = 0 then () else begin nat_from_intseq_le_slice_lemma #U8 #SEC #len b 1; nat_from_intseq_le_lemma0 (Seq.slice b 0 1); lemma_nat_from_bytes_le_zeroes (len-1) (Seq.slice b 1 len) end
val lemma_nat_from_bytes_le_zeroes: len:size_nat -> b:lseq uint8 len -> Lemma (requires (forall (i:nat). i < len ==> b.[i] == u8 0)) (ensures nat_from_intseq_le b == 0) let rec lemma_nat_from_bytes_le_zeroes len b =
false
null
true
if len = 0 then () else (nat_from_intseq_le_slice_lemma #U8 #SEC #len b 1; nat_from_intseq_le_lemma0 (Seq.slice b 0 1); lemma_nat_from_bytes_le_zeroes (len - 1) (Seq.slice b 1 len))
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.IntTypes.size_nat", "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "Prims.op_Equality", "Prims.int", "Prims.bool", "Hacl.Impl.Poly1305.Lemmas.lemma_nat_from_bytes_le_zeroes", "Prims.op_Subtraction", "FStar.Seq.Base.slice", "Prims.unit", "Lib.ByteSequence.nat_from_intseq_le_lemma0", "Lib.IntTypes.U8", "Lib.IntTypes.SEC", "Lib.ByteSequence.nat_from_intseq_le_slice_lemma" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0]) let uints_from_bytes_le_lemma64_2 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16) 
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2]) let uints_from_bytes_le_lemma64_4 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 0 32)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 32 32)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16); uint_from_bytes_le_lemma (sub b 32 16); uint_from_bytes_le_lemma (sub b 48 16) val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo)) let uints64_to_bytes_le_lemma lo hi = let lp = nat_to_bytes_le #SEC 16 (v hi * pow2 64 + v lo) in let rp = concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) in assert (nat_from_bytes_le lp == v hi * pow2 64 + v lo); Seq.append_slices (uint_to_bytes_le lo) (uint_to_bytes_le hi); nat_from_intseq_le_slice_lemma #U8 #SEC #16 rp 8; assert (nat_from_bytes_le rp == nat_from_bytes_le (Seq.slice rp 0 8) + pow2 (8 * 8) * nat_from_bytes_le (Seq.slice rp 8 16)); assert (nat_from_bytes_le rp == nat_from_bytes_le (uint_to_bytes_le lo) + pow2 64 * nat_from_bytes_le (uint_to_bytes_le hi)); lemma_uint_to_bytes_le_preserves_value lo; lemma_uint_to_bytes_le_preserves_value hi; nat_from_intseq_le_inj lp rp val lemma_nat_from_bytes_le_zeroes: len:size_nat -> b:lseq uint8 len -> Lemma (requires (forall (i:nat). i < len ==> b.[i] == u8 0)) (ensures nat_from_intseq_le b == 0)
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val lemma_nat_from_bytes_le_zeroes: len:size_nat -> b:lseq uint8 len -> Lemma (requires (forall (i:nat). i < len ==> b.[i] == u8 0)) (ensures nat_from_intseq_le b == 0)
[ "recursion" ]
Hacl.Impl.Poly1305.Lemmas.lemma_nat_from_bytes_le_zeroes
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
len: Lib.IntTypes.size_nat -> b: Lib.Sequence.lseq Lib.IntTypes.uint8 len -> FStar.Pervasives.Lemma (requires forall (i: Prims.nat). i < len ==> b.[ i ] == Lib.IntTypes.u8 0) (ensures Lib.ByteSequence.nat_from_intseq_le b == 0)
{ "end_col": 66, "end_line": 102, "start_col": 2, "start_line": 98 }
FStar.Pervasives.Lemma
val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0])
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b
val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b =
false
null
true
index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "Hacl.Impl.Poly1305.Lemmas.uint_from_bytes_le_lemma", "Prims.unit", "Lib.ByteSequence.index_uints_from_bytes_le", "Lib.IntTypes.U64", "Lib.IntTypes.SEC", "Lib.Sequence.sub" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0])
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0])
[]
Hacl.Impl.Poly1305.Lemmas.uints_from_bytes_le_lemma64_1
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
b: Lib.Sequence.lseq Lib.IntTypes.uint8 16 -> FStar.Pervasives.Lemma (ensures (let lo = Lib.ByteSequence.uints_from_bytes_le (Lib.Sequence.sub b 0 8) in let hi = Lib.ByteSequence.uints_from_bytes_le (Lib.Sequence.sub b 8 8) in Lib.ByteSequence.nat_from_bytes_le b == Prims.pow2 64 * Lib.IntTypes.uint_v hi.[ 0 ] + Lib.IntTypes.uint_v lo.[ 0 ]))
{ "end_col": 28, "end_line": 38, "start_col": 2, "start_line": 36 }
FStar.Pervasives.Lemma
val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo))
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let uints64_to_bytes_le_lemma lo hi = let lp = nat_to_bytes_le #SEC 16 (v hi * pow2 64 + v lo) in let rp = concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) in assert (nat_from_bytes_le lp == v hi * pow2 64 + v lo); Seq.append_slices (uint_to_bytes_le lo) (uint_to_bytes_le hi); nat_from_intseq_le_slice_lemma #U8 #SEC #16 rp 8; assert (nat_from_bytes_le rp == nat_from_bytes_le (Seq.slice rp 0 8) + pow2 (8 * 8) * nat_from_bytes_le (Seq.slice rp 8 16)); assert (nat_from_bytes_le rp == nat_from_bytes_le (uint_to_bytes_le lo) + pow2 64 * nat_from_bytes_le (uint_to_bytes_le hi)); lemma_uint_to_bytes_le_preserves_value lo; lemma_uint_to_bytes_le_preserves_value hi; nat_from_intseq_le_inj lp rp
val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo)) let uints64_to_bytes_le_lemma lo hi =
false
null
true
let lp = nat_to_bytes_le #SEC 16 (v hi * pow2 64 + v lo) in let rp = concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) in assert (nat_from_bytes_le lp == v hi * pow2 64 + v lo); Seq.append_slices (uint_to_bytes_le lo) (uint_to_bytes_le hi); nat_from_intseq_le_slice_lemma #U8 #SEC #16 rp 8; assert (nat_from_bytes_le rp == nat_from_bytes_le (Seq.slice rp 0 8) + pow2 (8 * 8) * nat_from_bytes_le (Seq.slice rp 8 16)); assert (nat_from_bytes_le rp == nat_from_bytes_le (uint_to_bytes_le lo) + pow2 64 * nat_from_bytes_le (uint_to_bytes_le hi)); lemma_uint_to_bytes_le_preserves_value lo; lemma_uint_to_bytes_le_preserves_value hi; nat_from_intseq_le_inj lp rp
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.IntTypes.uint64", "Lib.ByteSequence.nat_from_intseq_le_inj", "Lib.IntTypes.U8", "Lib.IntTypes.SEC", "Prims.unit", "Lib.ByteSequence.lemma_uint_to_bytes_le_preserves_value", "Lib.IntTypes.U64", "Prims._assert", "Prims.eq2", "Prims.int", "Lib.ByteSequence.nat_from_bytes_le", "Prims.op_Addition", "Lib.ByteSequence.uint_to_bytes_le", "FStar.Mul.op_Star", "Prims.pow2", "FStar.Seq.Base.slice", "Lib.IntTypes.uint_t", "Lib.ByteSequence.nat_from_intseq_le_slice_lemma", "FStar.Seq.Properties.append_slices", "Lib.IntTypes.v", "Lib.Sequence.lseq", "Lib.IntTypes.int_t", "FStar.Seq.Base.seq", "Lib.Sequence.to_seq", "FStar.Seq.Base.append", "Lib.Sequence.concat", "Lib.IntTypes.numbytes", "Lib.Sequence.seq", "Prims.l_and", "Prims.nat", "Lib.Sequence.length", "Prims.l_or", "Prims.b2t", "Prims.op_LessThan", "Prims.op_Multiply", "Lib.ByteSequence.nat_from_intseq_le", "Lib.ByteSequence.nat_to_bytes_le" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0]) let uints_from_bytes_le_lemma64_2 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16) 
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2]) let uints_from_bytes_le_lemma64_4 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 0 32)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 32 32)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16); uint_from_bytes_le_lemma (sub b 32 16); uint_from_bytes_le_lemma (sub b 48 16) val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo))
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo))
[]
Hacl.Impl.Poly1305.Lemmas.uints64_to_bytes_le_lemma
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
lo: Lib.IntTypes.uint64 -> hi: Lib.IntTypes.uint64 -> FStar.Pervasives.Lemma (ensures Lib.Sequence.concat (Lib.ByteSequence.uint_to_bytes_le lo) (Lib.ByteSequence.uint_to_bytes_le hi) == Lib.ByteSequence.nat_to_bytes_le 16 (Lib.IntTypes.v hi * Prims.pow2 64 + Lib.IntTypes.v lo))
{ "end_col": 30, "end_line": 90, "start_col": 37, "start_line": 80 }
FStar.Pervasives.Lemma
val nat_from_bytes_le_eq_lemma_: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_intseq_le b == nat_from_intseq_le (update_sub tmp 0 len b))
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let nat_from_bytes_le_eq_lemma_ len b = let tmp = create 16 (u8 0) in let r = update_sub tmp 0 len b in assert (Seq.slice r 0 len == b); assert (forall (i:nat). len <= i /\ i < 16 ==> r.[i] == u8 0); assert (forall (i:nat). i < 16 - len ==> Seq.index (Seq.slice r len 16) i == u8 0); nat_from_intseq_le_slice_lemma #U8 #SEC #16 r len; assert (nat_from_intseq_le r == nat_from_intseq_le (Seq.slice r 0 len) + pow2 (len * 8) * nat_from_intseq_le (Seq.slice r len 16)); assert (nat_from_intseq_le r == nat_from_intseq_le b + pow2 (len * 8) * nat_from_intseq_le (Seq.slice r len 16)); lemma_nat_from_bytes_le_zeroes (16 - len) (Seq.slice r len 16)
val nat_from_bytes_le_eq_lemma_: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_intseq_le b == nat_from_intseq_le (update_sub tmp 0 len b)) let nat_from_bytes_le_eq_lemma_ len b =
false
null
true
let tmp = create 16 (u8 0) in let r = update_sub tmp 0 len b in assert (Seq.slice r 0 len == b); assert (forall (i: nat). len <= i /\ i < 16 ==> r.[ i ] == u8 0); assert (forall (i: nat). i < 16 - len ==> Seq.index (Seq.slice r len 16) i == u8 0); nat_from_intseq_le_slice_lemma #U8 #SEC #16 r len; assert (nat_from_intseq_le r == nat_from_intseq_le (Seq.slice r 0 len) + pow2 (len * 8) * nat_from_intseq_le (Seq.slice r len 16)); assert (nat_from_intseq_le r == nat_from_intseq_le b + pow2 (len * 8) * nat_from_intseq_le (Seq.slice r len 16)); lemma_nat_from_bytes_le_zeroes (16 - len) (Seq.slice r len 16)
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.IntTypes.size_nat", "Prims.b2t", "Prims.op_LessThan", "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "Hacl.Impl.Poly1305.Lemmas.lemma_nat_from_bytes_le_zeroes", "Prims.op_Subtraction", "FStar.Seq.Base.slice", "Prims.unit", "Prims._assert", "Prims.eq2", "Prims.int", "Lib.ByteSequence.nat_from_intseq_le", "Lib.IntTypes.U8", "Lib.IntTypes.SEC", "Prims.op_Addition", "FStar.Mul.op_Star", "Prims.pow2", "Lib.ByteSequence.nat_from_intseq_le_slice_lemma", "Prims.l_Forall", "Prims.nat", "Prims.l_imp", "FStar.Seq.Base.index", "Lib.IntTypes.u8", "Prims.l_and", "Prims.op_LessThanOrEqual", "Prims.l_or", "Lib.Sequence.to_seq", "Lib.IntTypes.range_t", "Lib.IntTypes.v", "Lib.Sequence.op_String_Access", "FStar.Seq.Base.seq", "Lib.IntTypes.int_t", "Lib.Sequence.sub", "Lib.Sequence.index", "Lib.Sequence.update_sub", "FStar.Seq.Base.create", "Lib.IntTypes.mk_int", "Lib.Sequence.create" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0]) let uints_from_bytes_le_lemma64_2 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16) 
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2]) let uints_from_bytes_le_lemma64_4 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 0 32)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 32 32)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16); uint_from_bytes_le_lemma (sub b 32 16); uint_from_bytes_le_lemma (sub b 48 16) val uints64_to_bytes_le_lemma: lo:uint64 -> hi:uint64 -> Lemma (concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) == nat_to_bytes_le 16 (v hi * pow2 64 + v lo)) let uints64_to_bytes_le_lemma lo hi = let lp = nat_to_bytes_le #SEC 16 (v hi * pow2 64 + v lo) in let rp = concat (uint_to_bytes_le lo) (uint_to_bytes_le hi) in assert (nat_from_bytes_le lp == v hi * pow2 64 + v lo); Seq.append_slices (uint_to_bytes_le lo) (uint_to_bytes_le hi); nat_from_intseq_le_slice_lemma #U8 #SEC #16 rp 8; assert (nat_from_bytes_le rp == nat_from_bytes_le (Seq.slice rp 0 8) + pow2 (8 * 8) * nat_from_bytes_le (Seq.slice rp 8 16)); assert (nat_from_bytes_le rp == nat_from_bytes_le (uint_to_bytes_le lo) + pow2 64 * nat_from_bytes_le (uint_to_bytes_le hi)); lemma_uint_to_bytes_le_preserves_value lo; lemma_uint_to_bytes_le_preserves_value hi; nat_from_intseq_le_inj lp rp val lemma_nat_from_bytes_le_zeroes: len:size_nat -> b:lseq uint8 len -> Lemma (requires (forall (i:nat). 
i < len ==> b.[i] == u8 0)) (ensures nat_from_intseq_le b == 0) let rec lemma_nat_from_bytes_le_zeroes len b = if len = 0 then () else begin nat_from_intseq_le_slice_lemma #U8 #SEC #len b 1; nat_from_intseq_le_lemma0 (Seq.slice b 0 1); lemma_nat_from_bytes_le_zeroes (len-1) (Seq.slice b 1 len) end val nat_from_bytes_le_eq_lemma_: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_intseq_le b == nat_from_intseq_le (update_sub tmp 0 len b))
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val nat_from_bytes_le_eq_lemma_: len:size_nat{len < 16} -> b:lseq uint8 len -> Lemma (let tmp = create 16 (u8 0) in nat_from_intseq_le b == nat_from_intseq_le (update_sub tmp 0 len b))
[]
Hacl.Impl.Poly1305.Lemmas.nat_from_bytes_le_eq_lemma_
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
len: Lib.IntTypes.size_nat{len < 16} -> b: Lib.Sequence.lseq Lib.IntTypes.uint8 len -> FStar.Pervasives.Lemma (ensures (let tmp = Lib.Sequence.create 16 (Lib.IntTypes.u8 0) in Lib.ByteSequence.nat_from_intseq_le b == Lib.ByteSequence.nat_from_intseq_le (Lib.Sequence.update_sub tmp 0 len b)))
{ "end_col": 64, "end_line": 118, "start_col": 39, "start_line": 109 }
FStar.Pervasives.Lemma
val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo)
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b)
val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b =
false
null
true
let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[ 0 ] + pow2 64 * uint_v r2.[ 1 ]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b)
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "FStar.Classical.forall_intro", "Lib.IntTypes.size_nat", "Prims.b2t", "Prims.op_LessThan", "Prims.eq2", "Lib.IntTypes.uint_t", "Lib.IntTypes.U64", "Lib.IntTypes.SEC", "Lib.Sequence.index", "Lib.ByteSequence.uints_from_bytes_le", "Lib.ByteSequence.uint_from_bytes_le", "Lib.Sequence.sub", "Lib.IntTypes.U8", "FStar.Mul.op_Star", "Lib.IntTypes.numbytes", "Lib.ByteSequence.index_uints_from_bytes_le", "Prims.unit", "Prims._assert", "Prims.int", "Prims.op_Addition", "Lib.IntTypes.uint_v", "Lib.Sequence.op_String_Access", "Prims.pow2", "Lib.ByteSequence.nat_from_intseq_le_lemma0", "FStar.Seq.Base.slice", "Lib.ByteSequence.nat_from_intseq_le", "Lib.ByteSequence.nat_from_intseq_le_slice_lemma", "Prims.nat", "Prims.l_or", "Lib.Sequence.length", "Lib.IntTypes.bits", "Lib.ByteSequence.uints_from_bytes_le_nat_lemma", "Lib.IntTypes.int_t", "Prims.op_Multiply", "Lib.ByteSequence.nat_from_bytes_le" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo)
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo)
[]
Hacl.Impl.Poly1305.Lemmas.uint_from_bytes_le_lemma
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
b: Lib.Sequence.lseq Lib.IntTypes.uint8 16 -> FStar.Pervasives.Lemma (ensures (let lo = Lib.ByteSequence.uint_from_bytes_le (Lib.Sequence.sub b 0 8) in let hi = Lib.ByteSequence.uint_from_bytes_le (Lib.Sequence.sub b 8 8) in Lib.ByteSequence.nat_from_bytes_le b == Prims.pow2 64 * Lib.IntTypes.uint_v hi + Lib.IntTypes.uint_v lo))
{ "end_col": 67, "end_line": 27, "start_col": 32, "start_line": 17 }
FStar.Pervasives.Lemma
val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0])
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let uints_from_bytes_le_lemma64_2 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16)
val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0]) let uints_from_bytes_le_lemma64_2 b =
false
null
true
Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16)
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "Hacl.Impl.Poly1305.Lemmas.uint_from_bytes_le_lemma", "Lib.Sequence.sub", "Prims.unit", "FStar.Classical.forall_intro", "Lib.IntTypes.size_nat", "Prims.b2t", "Prims.op_LessThan", "Prims.eq2", "Lib.IntTypes.uint_t", "Lib.IntTypes.U64", "Lib.IntTypes.SEC", "Lib.Sequence.index", "Lib.ByteSequence.uints_from_bytes_le", "Lib.ByteSequence.uint_from_bytes_le", "Lib.IntTypes.U8", "FStar.Mul.op_Star", "Lib.IntTypes.numbytes", "Lib.ByteSequence.index_uints_from_bytes_le" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0])
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0])
[]
Hacl.Impl.Poly1305.Lemmas.uints_from_bytes_le_lemma64_2
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
b: Lib.Sequence.lseq Lib.IntTypes.uint8 32 -> FStar.Pervasives.Lemma (ensures (let lo = Lib.ByteSequence.uints_from_bytes_le (Lib.Sequence.sub b 0 16) in let hi = Lib.ByteSequence.uints_from_bytes_le (Lib.Sequence.sub b 16 16) in let b1 = Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.sub b 0 16) in let b2 = Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.sub b 16 16) in b1 == Prims.pow2 64 * Lib.IntTypes.uint_v lo.[ 1 ] + Lib.IntTypes.uint_v lo.[ 0 ] /\ b2 == Prims.pow2 64 * Lib.IntTypes.uint_v hi.[ 1 ] + Lib.IntTypes.uint_v hi.[ 0 ]))
{ "end_col": 40, "end_line": 53, "start_col": 2, "start_line": 50 }
FStar.Pervasives.Lemma
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2])
[ { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntVector", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let uints_from_bytes_le_lemma64_4 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 0 32)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 32 32)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16); uint_from_bytes_le_lemma (sub b 32 16); uint_from_bytes_le_lemma (sub b 48 16)
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2]) let uints_from_bytes_le_lemma64_4 b =
false
null
true
Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 0 32)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #4 (sub b 32 32)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16); uint_from_bytes_le_lemma (sub b 32 16); uint_from_bytes_le_lemma (sub b 48 16)
{ "checked_file": "Hacl.Impl.Poly1305.Lemmas.fst.checked", "dependencies": [ "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntVector.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.Lemmas.fst" }
[ "lemma" ]
[ "Lib.Sequence.lseq", "Lib.IntTypes.uint8", "Hacl.Impl.Poly1305.Lemmas.uint_from_bytes_le_lemma", "Lib.Sequence.sub", "Prims.unit", "FStar.Classical.forall_intro", "Lib.IntTypes.size_nat", "Prims.b2t", "Prims.op_LessThan", "Prims.eq2", "Lib.IntTypes.uint_t", "Lib.IntTypes.U64", "Lib.IntTypes.SEC", "Lib.Sequence.index", "Lib.ByteSequence.uints_from_bytes_le", "Lib.ByteSequence.uint_from_bytes_le", "Lib.IntTypes.U8", "FStar.Mul.op_Star", "Lib.IntTypes.numbytes", "Lib.ByteSequence.index_uints_from_bytes_le" ]
[]
module Hacl.Impl.Poly1305.Lemmas open FStar.Mul open Lib.IntTypes open Lib.IntVector open Lib.Sequence open Lib.ByteSequence #set-options "--z3rlimit 50 --max_fuel 1" val uint_from_bytes_le_lemma: b:lseq uint8 16 -> Lemma (let lo = uint_from_bytes_le #U64 (sub b 0 8) in let hi = uint_from_bytes_le #U64 (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi + uint_v lo) let uint_from_bytes_le_lemma b = let r1 = nat_from_bytes_le b in let r2 = uints_from_bytes_le #U64 #SEC #2 b in uints_from_bytes_le_nat_lemma #U64 #SEC #2 b; assert (r1 == nat_from_intseq_le r2); nat_from_intseq_le_slice_lemma #U64 #SEC #2 r2 1; assert (r1 == nat_from_intseq_le (Seq.slice r2 0 1) + pow2 64 * nat_from_intseq_le #U64 #SEC (Seq.slice r2 1 2)); nat_from_intseq_le_lemma0 (Seq.slice r2 0 1); nat_from_intseq_le_lemma0 (Seq.slice r2 1 2); assert (r1 == uint_v r2.[0] + pow2 64 * uint_v r2.[1]); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 b) val uints_from_bytes_le_lemma64_1: b:lseq uint8 16 -> Lemma (let lo:lseq uint64 1 = uints_from_bytes_le (sub b 0 8) in let hi:lseq uint64 1 = uints_from_bytes_le (sub b 8 8) in nat_from_bytes_le b == pow2 64 * uint_v hi.[0] + uint_v lo.[0]) let uints_from_bytes_le_lemma64_1 b = index_uints_from_bytes_le #U64 #SEC #1 (sub b 0 8) 0; index_uints_from_bytes_le #U64 #SEC #1 (sub b 8 8) 0; uint_from_bytes_le_lemma b val uints_from_bytes_le_lemma64_2: b:lseq uint8 32 -> Lemma (let lo:lseq uint64 2 = uints_from_bytes_le (sub b 0 16) in let hi:lseq uint64 2 = uints_from_bytes_le (sub b 16 16) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v hi.[1] + uint_v hi.[0]) let uints_from_bytes_le_lemma64_2 b = Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 0 16)); Classical.forall_intro (index_uints_from_bytes_le #U64 #SEC #2 (sub b 16 16)); uint_from_bytes_le_lemma (sub b 0 16); uint_from_bytes_le_lemma (sub b 16 16) 
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2])
false
false
Hacl.Impl.Poly1305.Lemmas.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val uints_from_bytes_le_lemma64_4: b:lseq uint8 64 -> Lemma (let lo:lseq uint64 4 = uints_from_bytes_le (sub b 0 32) in let hi:lseq uint64 4 = uints_from_bytes_le (sub b 32 32) in let b1 = nat_from_bytes_le (sub b 0 16) in let b2 = nat_from_bytes_le (sub b 16 16) in let b3 = nat_from_bytes_le (sub b 32 16) in let b4 = nat_from_bytes_le (sub b 48 16) in b1 == pow2 64 * uint_v lo.[1] + uint_v lo.[0] /\ b2 == pow2 64 * uint_v lo.[3] + uint_v lo.[2] /\ b3 == pow2 64 * uint_v hi.[1] + uint_v hi.[0] /\ b4 == pow2 64 * uint_v hi.[3] + uint_v hi.[2])
[]
Hacl.Impl.Poly1305.Lemmas.uints_from_bytes_le_lemma64_4
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.Lemmas.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
b: Lib.Sequence.lseq Lib.IntTypes.uint8 64 -> FStar.Pervasives.Lemma (ensures (let lo = Lib.ByteSequence.uints_from_bytes_le (Lib.Sequence.sub b 0 32) in let hi = Lib.ByteSequence.uints_from_bytes_le (Lib.Sequence.sub b 32 32) in let b1 = Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.sub b 0 16) in let b2 = Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.sub b 16 16) in let b3 = Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.sub b 32 16) in let b4 = Lib.ByteSequence.nat_from_bytes_le (Lib.Sequence.sub b 48 16) in b1 == Prims.pow2 64 * Lib.IntTypes.uint_v lo.[ 1 ] + Lib.IntTypes.uint_v lo.[ 0 ] /\ b2 == Prims.pow2 64 * Lib.IntTypes.uint_v lo.[ 3 ] + Lib.IntTypes.uint_v lo.[ 2 ] /\ b3 == Prims.pow2 64 * Lib.IntTypes.uint_v hi.[ 1 ] + Lib.IntTypes.uint_v hi.[ 0 ] /\ b4 == Prims.pow2 64 * Lib.IntTypes.uint_v hi.[ 3 ] + Lib.IntTypes.uint_v hi.[ 2 ]))
{ "end_col": 40, "end_line": 74, "start_col": 2, "start_line": 69 }
Prims.Tot
val crypto_kem_keypair: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> lbytes (crypto_publickeybytes a) & lbytes (crypto_secretkeybytes a)
[ { "abbrev": true, "full_module": "Spec.Frodo.KEM.Decaps", "short_module": "Decaps" }, { "abbrev": true, "full_module": "Spec.Frodo.KEM.Encaps", "short_module": "Encaps" }, { "abbrev": true, "full_module": "Spec.Frodo.KEM.KeyGen", "short_module": "KeyGen" }, { "abbrev": false, "full_module": "Spec.Frodo.Params", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Spec.Frodo", "short_module": null }, { "abbrev": false, "full_module": "Spec.Frodo", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let crypto_kem_keypair a gen_a state = KeyGen.crypto_kem_keypair a gen_a state
val crypto_kem_keypair: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> lbytes (crypto_publickeybytes a) & lbytes (crypto_secretkeybytes a) let crypto_kem_keypair a gen_a state =
false
null
false
KeyGen.crypto_kem_keypair a gen_a state
{ "checked_file": "Spec.Frodo.KEM.fst.checked", "dependencies": [ "Spec.Frodo.Random.fst.checked", "Spec.Frodo.Params.fst.checked", "Spec.Frodo.KEM.KeyGen.fst.checked", "Spec.Frodo.KEM.Encaps.fst.checked", "Spec.Frodo.KEM.Decaps.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked" ], "interface_file": false, "source_file": "Spec.Frodo.KEM.fst" }
[ "total" ]
[ "Spec.Frodo.Params.frodo_alg", "Spec.Frodo.Params.frodo_gen_a", "Spec.Frodo.Random.state_t", "Spec.Frodo.KEM.KeyGen.crypto_kem_keypair", "FStar.Pervasives.Native.tuple2", "Lib.ByteSequence.lbytes", "Spec.Frodo.Params.crypto_publickeybytes", "Spec.Frodo.Params.crypto_secretkeybytes" ]
[]
module Spec.Frodo.KEM open FStar.Mul open Lib.IntTypes open Lib.Sequence open Lib.ByteSequence open Spec.Frodo.Params module KeyGen = Spec.Frodo.KEM.KeyGen module Encaps = Spec.Frodo.KEM.Encaps module Decaps = Spec.Frodo.KEM.Decaps #set-options "--z3rlimit 50 --fuel 0 --ifuel 0" val crypto_kem_keypair: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> lbytes (crypto_publickeybytes a) & lbytes (crypto_secretkeybytes a)
false
false
Spec.Frodo.KEM.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 0, "initial_ifuel": 0, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val crypto_kem_keypair: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> lbytes (crypto_publickeybytes a) & lbytes (crypto_secretkeybytes a)
[]
Spec.Frodo.KEM.crypto_kem_keypair
{ "file_name": "specs/frodo/Spec.Frodo.KEM.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Frodo.Params.frodo_alg -> gen_a: Spec.Frodo.Params.frodo_gen_a -> state: Spec.Frodo.Random.state_t -> Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_publickeybytes a) * Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_secretkeybytes a)
{ "end_col": 78, "end_line": 23, "start_col": 39, "start_line": 23 }
Prims.Tot
val crypto_kem_enc: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> pk:lbytes (crypto_publickeybytes a) -> lbytes (crypto_ciphertextbytes a) & lbytes (crypto_bytes a)
[ { "abbrev": true, "full_module": "Spec.Frodo.KEM.Decaps", "short_module": "Decaps" }, { "abbrev": true, "full_module": "Spec.Frodo.KEM.Encaps", "short_module": "Encaps" }, { "abbrev": true, "full_module": "Spec.Frodo.KEM.KeyGen", "short_module": "KeyGen" }, { "abbrev": false, "full_module": "Spec.Frodo.Params", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Spec.Frodo", "short_module": null }, { "abbrev": false, "full_module": "Spec.Frodo", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let crypto_kem_enc a gen_a state pk = Encaps.crypto_kem_enc a gen_a state pk
val crypto_kem_enc: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> pk:lbytes (crypto_publickeybytes a) -> lbytes (crypto_ciphertextbytes a) & lbytes (crypto_bytes a) let crypto_kem_enc a gen_a state pk =
false
null
false
Encaps.crypto_kem_enc a gen_a state pk
{ "checked_file": "Spec.Frodo.KEM.fst.checked", "dependencies": [ "Spec.Frodo.Random.fst.checked", "Spec.Frodo.Params.fst.checked", "Spec.Frodo.KEM.KeyGen.fst.checked", "Spec.Frodo.KEM.Encaps.fst.checked", "Spec.Frodo.KEM.Decaps.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked" ], "interface_file": false, "source_file": "Spec.Frodo.KEM.fst" }
[ "total" ]
[ "Spec.Frodo.Params.frodo_alg", "Spec.Frodo.Params.frodo_gen_a", "Spec.Frodo.Random.state_t", "Lib.ByteSequence.lbytes", "Spec.Frodo.Params.crypto_publickeybytes", "Spec.Frodo.KEM.Encaps.crypto_kem_enc", "FStar.Pervasives.Native.tuple2", "Spec.Frodo.Params.crypto_ciphertextbytes", "Spec.Frodo.Params.crypto_bytes" ]
[]
module Spec.Frodo.KEM open FStar.Mul open Lib.IntTypes open Lib.Sequence open Lib.ByteSequence open Spec.Frodo.Params module KeyGen = Spec.Frodo.KEM.KeyGen module Encaps = Spec.Frodo.KEM.Encaps module Decaps = Spec.Frodo.KEM.Decaps #set-options "--z3rlimit 50 --fuel 0 --ifuel 0" val crypto_kem_keypair: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> lbytes (crypto_publickeybytes a) & lbytes (crypto_secretkeybytes a) let crypto_kem_keypair a gen_a state = KeyGen.crypto_kem_keypair a gen_a state val crypto_kem_enc: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> pk:lbytes (crypto_publickeybytes a) -> lbytes (crypto_ciphertextbytes a) & lbytes (crypto_bytes a)
false
false
Spec.Frodo.KEM.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 0, "initial_ifuel": 0, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val crypto_kem_enc: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> pk:lbytes (crypto_publickeybytes a) -> lbytes (crypto_ciphertextbytes a) & lbytes (crypto_bytes a)
[]
Spec.Frodo.KEM.crypto_kem_enc
{ "file_name": "specs/frodo/Spec.Frodo.KEM.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Frodo.Params.frodo_alg -> gen_a: Spec.Frodo.Params.frodo_gen_a -> state: Spec.Frodo.Random.state_t -> pk: Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_publickeybytes a) -> Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_ciphertextbytes a) * Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_bytes a)
{ "end_col": 76, "end_line": 33, "start_col": 38, "start_line": 33 }
Prims.Tot
val crypto_kem_dec: a:frodo_alg -> gen_a:frodo_gen_a -> ct:lbytes (crypto_ciphertextbytes a) -> sk:lbytes (crypto_secretkeybytes a) -> lbytes (crypto_bytes a)
[ { "abbrev": true, "full_module": "Spec.Frodo.KEM.Decaps", "short_module": "Decaps" }, { "abbrev": true, "full_module": "Spec.Frodo.KEM.Encaps", "short_module": "Encaps" }, { "abbrev": true, "full_module": "Spec.Frodo.KEM.KeyGen", "short_module": "KeyGen" }, { "abbrev": false, "full_module": "Spec.Frodo.Params", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Spec.Frodo", "short_module": null }, { "abbrev": false, "full_module": "Spec.Frodo", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let crypto_kem_dec a gen_a ct sk = Decaps.crypto_kem_dec a gen_a ct sk
val crypto_kem_dec: a:frodo_alg -> gen_a:frodo_gen_a -> ct:lbytes (crypto_ciphertextbytes a) -> sk:lbytes (crypto_secretkeybytes a) -> lbytes (crypto_bytes a) let crypto_kem_dec a gen_a ct sk =
false
null
false
Decaps.crypto_kem_dec a gen_a ct sk
{ "checked_file": "Spec.Frodo.KEM.fst.checked", "dependencies": [ "Spec.Frodo.Random.fst.checked", "Spec.Frodo.Params.fst.checked", "Spec.Frodo.KEM.KeyGen.fst.checked", "Spec.Frodo.KEM.Encaps.fst.checked", "Spec.Frodo.KEM.Decaps.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked" ], "interface_file": false, "source_file": "Spec.Frodo.KEM.fst" }
[ "total" ]
[ "Spec.Frodo.Params.frodo_alg", "Spec.Frodo.Params.frodo_gen_a", "Lib.ByteSequence.lbytes", "Spec.Frodo.Params.crypto_ciphertextbytes", "Spec.Frodo.Params.crypto_secretkeybytes", "Spec.Frodo.KEM.Decaps.crypto_kem_dec", "Spec.Frodo.Params.crypto_bytes" ]
[]
module Spec.Frodo.KEM open FStar.Mul open Lib.IntTypes open Lib.Sequence open Lib.ByteSequence open Spec.Frodo.Params module KeyGen = Spec.Frodo.KEM.KeyGen module Encaps = Spec.Frodo.KEM.Encaps module Decaps = Spec.Frodo.KEM.Decaps #set-options "--z3rlimit 50 --fuel 0 --ifuel 0" val crypto_kem_keypair: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> lbytes (crypto_publickeybytes a) & lbytes (crypto_secretkeybytes a) let crypto_kem_keypair a gen_a state = KeyGen.crypto_kem_keypair a gen_a state val crypto_kem_enc: a:frodo_alg -> gen_a:frodo_gen_a -> state:Spec.Frodo.Random.state_t -> pk:lbytes (crypto_publickeybytes a) -> lbytes (crypto_ciphertextbytes a) & lbytes (crypto_bytes a) let crypto_kem_enc a gen_a state pk = Encaps.crypto_kem_enc a gen_a state pk val crypto_kem_dec: a:frodo_alg -> gen_a:frodo_gen_a -> ct:lbytes (crypto_ciphertextbytes a) -> sk:lbytes (crypto_secretkeybytes a) -> lbytes (crypto_bytes a)
false
false
Spec.Frodo.KEM.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 0, "initial_ifuel": 0, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val crypto_kem_dec: a:frodo_alg -> gen_a:frodo_gen_a -> ct:lbytes (crypto_ciphertextbytes a) -> sk:lbytes (crypto_secretkeybytes a) -> lbytes (crypto_bytes a)
[]
Spec.Frodo.KEM.crypto_kem_dec
{ "file_name": "specs/frodo/Spec.Frodo.KEM.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Frodo.Params.frodo_alg -> gen_a: Spec.Frodo.Params.frodo_gen_a -> ct: Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_ciphertextbytes a) -> sk: Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_secretkeybytes a) -> Lib.ByteSequence.lbytes (Spec.Frodo.Params.crypto_bytes a)
{ "end_col": 70, "end_line": 43, "start_col": 35, "start_line": 43 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a}
let block_w (a: sha2_alg) =
false
null
false
m: S.seq (word a) {S.length m = block_word_length a}
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "FStar.Seq.Base.seq", "Spec.Hash.Definitions.word", "Prims.b2t", "Prims.op_Equality", "Prims.int", "FStar.Seq.Base.length", "Spec.Hash.Definitions.block_word_length" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val block_w : a: Spec.Hash.Definitions.sha2_alg -> Type0
[]
Spec.SHA2.block_w
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> Type0
{ "end_col": 80, "end_line": 34, "start_col": 29, "start_line": 34 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let shuffle_core_pre = shuffle_core_pre_
let shuffle_core_pre =
false
null
false
shuffle_core_pre_
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.SHA2.shuffle_core_pre_" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val shuffle_core_pre : a: Spec.Hash.Definitions.sha2_alg -> k_t: Spec.Hash.Definitions.word a -> ws_t: Spec.Hash.Definitions.word a -> hash: Spec.Hash.Definitions.words_state a -> Spec.Hash.Definitions.words_state a
[]
Spec.SHA2.shuffle_core_pre
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> k_t: Spec.Hash.Definitions.word a -> ws_t: Spec.Hash.Definitions.word a -> hash: Spec.Hash.Definitions.words_state a -> Spec.Hash.Definitions.words_state a
{ "end_col": 40, "end_line": 173, "start_col": 23, "start_line": 173 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let shuffle = shuffle_pre
let shuffle =
false
null
false
shuffle_pre
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.SHA2.shuffle_pre" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. 
t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0)) [@"opaque_to_smt"] let ws_pre = ws_pre_ (* Full shuffling function *) let shuffle_pre (a:sha2_alg) (hash:words_state a) (block:block_w a): Tot (words_state a) = let ws = ws_pre a block in let k = k0 a in Lib.LoopCombinators.repeati (size_k_w a) (fun i h -> shuffle_core_pre a k.[i] ws.[i] h) hash
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val shuffle : a: Spec.Hash.Definitions.sha2_alg -> hash: Spec.Hash.Definitions.words_state a -> block: Spec.SHA2.block_w a -> Spec.Hash.Definitions.words_state a
[]
Spec.SHA2.shuffle
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> hash: Spec.Hash.Definitions.words_state a -> block: Spec.SHA2.block_w a -> Spec.Hash.Definitions.words_state a
{ "end_col": 25, "end_line": 210, "start_col": 14, "start_line": 210 }
Prims.Tot
val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z))
val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) let _Maj a x y z =
false
null
false
(x &. y) ^. ((x &. z) ^. (y &. z))
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Hat_Dot", "Spec.SHA2.op_Amp_Dot" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a)
[]
Spec.SHA2._Maj
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> x: Spec.Hash.Definitions.word a -> y: Spec.Hash.Definitions.word a -> z: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 53, "end_line": 115, "start_col": 19, "start_line": 115 }
Prims.Tot
val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let _Ch a x y z = (x &. y) ^. (~.x &. z)
val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) let _Ch a x y z =
false
null
false
(x &. y) ^. (~.x &. z)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Hat_Dot", "Spec.SHA2.op_Amp_Dot", "Spec.SHA2.op_Tilde_Dot" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a)
[]
Spec.SHA2._Ch
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> x: Spec.Hash.Definitions.word a -> y: Spec.Hash.Definitions.word a -> z: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 41, "end_line": 110, "start_col": 19, "start_line": 110 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let counter = nat
let counter =
false
null
false
nat
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Prims.nat" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a}
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val counter : Type0
[]
Spec.SHA2.counter
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
Type0
{ "end_col": 17, "end_line": 35, "start_col": 14, "start_line": 35 }
Prims.Tot
val op384_512:ops
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul }
val op384_512:ops let op384_512:ops =
false
null
false
{ c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul }
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.SHA2.Mkops", "FStar.UInt32.__uint_to_t" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op384_512:ops
[]
Spec.SHA2.op384_512
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
Spec.SHA2.ops
{ "end_col": 33, "end_line": 59, "start_col": 2, "start_line": 56 }
Prims.Tot
val op224_256:ops
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul }
val op224_256:ops let op224_256:ops =
false
null
false
{ c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul }
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.SHA2.Mkops", "FStar.UInt32.__uint_to_t" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op224_256:ops
[]
Spec.SHA2.op224_256
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
Spec.SHA2.ops
{ "end_col": 33, "end_line": 51, "start_col": 2, "start_line": 48 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ws_pre = ws_pre_
let ws_pre =
false
null
false
ws_pre_
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.SHA2.ws_pre_" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0))
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val ws_pre : a: Spec.Hash.Definitions.sha2_alg -> block: Spec.SHA2.block_w a -> Spec.SHA2.k_w a
[]
Spec.SHA2.ws_pre
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> block: Spec.SHA2.block_w a -> Spec.SHA2.k_w a
{ "end_col": 20, "end_line": 201, "start_col": 13, "start_line": 201 }
Prims.Tot
val init: a:sha2_alg -> init_t a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let init a = h0 a
val init: a:sha2_alg -> init_t a let init a =
false
null
false
h0 a
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.h0", "Spec.Hash.Definitions.init_t" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. 
t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0)) [@"opaque_to_smt"] let ws_pre = ws_pre_ (* Full shuffling function *) let shuffle_pre (a:sha2_alg) (hash:words_state a) (block:block_w a): Tot (words_state a) = let ws = ws_pre a block in let k = k0 a in Lib.LoopCombinators.repeati (size_k_w a) (fun i h -> shuffle_core_pre a k.[i] ws.[i] h) hash [@"opaque_to_smt"] let shuffle = shuffle_pre
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val init: a:sha2_alg -> init_t a
[]
Spec.SHA2.init
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> Spec.Hash.Definitions.init_t a
{ "end_col": 17, "end_line": 212, "start_col": 13, "start_line": 212 }
Prims.Tot
val size_k_w: sha2_alg -> Tot nat
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80
val size_k_w: sha2_alg -> Tot nat let size_k_w: sha2_alg -> Tot nat =
false
null
false
function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Prims.nat" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *)
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val size_k_w: sha2_alg -> Tot nat
[]
Spec.SHA2.size_k_w
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.sha2_alg -> Prims.nat
{ "end_col": 29, "end_line": 16, "start_col": 36, "start_line": 14 }
Prims.Tot
val word_n: sha2_alg -> Tot nat
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64
val word_n: sha2_alg -> Tot nat let word_n: sha2_alg -> Tot nat =
false
null
false
function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Prims.nat" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val word_n: sha2_alg -> Tot nat
[]
Spec.SHA2.word_n
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.sha2_alg -> Prims.nat
{ "end_col": 29, "end_line": 21, "start_col": 34, "start_line": 19 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a}
let k_w (a: sha2_alg) =
false
null
false
m: S.seq (word a) {S.length m = size_k_w a}
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "FStar.Seq.Base.seq", "Spec.Hash.Definitions.word", "Prims.b2t", "Prims.op_Equality", "Prims.nat", "FStar.Seq.Base.length", "Spec.SHA2.size_k_w" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val k_w : a: Spec.Hash.Definitions.sha2_alg -> Type0
[]
Spec.SHA2.k_w
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> Type0
{ "end_col": 71, "end_line": 33, "start_col": 29, "start_line": 33 }
Prims.Tot
val op_Tilde_Dot: #a: sha2_alg -> word a -> word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC
val op_Tilde_Dot: #a: sha2_alg -> word a -> word a let op_Tilde_Dot (#a: sha2_alg) : word a -> word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Lib.IntTypes.op_Tilde_Dot", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Spec.Hash.Definitions.word" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op_Tilde_Dot: #a: sha2_alg -> word a -> word a
[]
Spec.SHA2.op_Tilde_Dot
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 43, "end_line": 92, "start_col": 2, "start_line": 90 }
Prims.Tot
val op0 (a: sha2_alg) : Tot ops
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512
val op0 (a: sha2_alg) : Tot ops let op0: a: sha2_alg -> Tot ops =
false
null
false
function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.op224_256", "Spec.SHA2.op384_512", "Spec.SHA2.ops" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul }
false
true
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op0 (a: sha2_alg) : Tot ops
[]
Spec.SHA2.op0
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> Spec.SHA2.ops
{ "end_col": 25, "end_line": 67, "start_col": 33, "start_line": 63 }
Prims.Tot
val shuffle_pre (a: sha2_alg) (hash: words_state a) (block: block_w a) : Tot (words_state a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let shuffle_pre (a:sha2_alg) (hash:words_state a) (block:block_w a): Tot (words_state a) = let ws = ws_pre a block in let k = k0 a in Lib.LoopCombinators.repeati (size_k_w a) (fun i h -> shuffle_core_pre a k.[i] ws.[i] h) hash
val shuffle_pre (a: sha2_alg) (hash: words_state a) (block: block_w a) : Tot (words_state a) let shuffle_pre (a: sha2_alg) (hash: words_state a) (block: block_w a) : Tot (words_state a) =
false
null
false
let ws = ws_pre a block in let k = k0 a in Lib.LoopCombinators.repeati (size_k_w a) (fun i h -> shuffle_core_pre a k.[ i ] ws.[ i ] h) hash
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.words_state", "Spec.SHA2.block_w", "Lib.LoopCombinators.repeati", "Spec.SHA2.size_k_w", "Prims.nat", "Prims.b2t", "Prims.op_LessThan", "Spec.SHA2.shuffle_core_pre", "Spec.SHA2.op_String_Access", "Spec.Hash.Definitions.word", "FStar.Seq.Base.seq", "Prims.op_Equality", "FStar.Seq.Base.length", "Spec.SHA2.k0", "Spec.SHA2.k_w", "Spec.SHA2.ws_pre" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0)) [@"opaque_to_smt"] let ws_pre = ws_pre_
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val shuffle_pre (a: sha2_alg) (hash: words_state a) (block: block_w a) : Tot (words_state a)
[]
Spec.SHA2.shuffle_pre
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> hash: Spec.Hash.Definitions.words_state a -> block: Spec.SHA2.block_w a -> Spec.Hash.Definitions.words_state a
{ "end_col": 55, "end_line": 208, "start_col": 90, "start_line": 204 }
Prims.Tot
val ws_pre_ (a: sha2_alg) (block: block_w a) : k_w a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0))
val ws_pre_ (a: sha2_alg) (block: block_w a) : k_w a let ws_pre_ (a: sha2_alg) (block: block_w a) : k_w a =
false
null
false
Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0))
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.block_w", "Lib.LoopCombinators.repeati", "Spec.SHA2.k_w", "Spec.SHA2.size_k_w", "Spec.SHA2.ws_pre_inner", "FStar.Seq.Base.create", "Spec.Hash.Definitions.word", "Spec.SHA2.to_word" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val ws_pre_ (a: sha2_alg) (block: block_w a) : k_w a
[]
Spec.SHA2.ws_pre_
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> block: Spec.SHA2.block_w a -> Spec.SHA2.k_w a
{ "end_col": 105, "end_line": 198, "start_col": 2, "start_line": 198 }
Prims.Tot
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x
let v' (#a: sha2_alg) (x: word a) =
false
null
false
match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Lib.IntTypes.uint_v", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Prims.int", "Prims.l_or", "Lib.IntTypes.range" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val v' : x: Spec.Hash.Definitions.word a -> x: Prims.int { Lib.IntTypes.range x Lib.IntTypes.U32 \/ Lib.IntTypes.range x Lib.IntTypes.U64 \/ Lib.IntTypes.range x Lib.IntTypes.U64 \/ Lib.IntTypes.range x Lib.IntTypes.U32 }
[]
Spec.SHA2.v'
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
x: Spec.Hash.Definitions.word a -> x: Prims.int { Lib.IntTypes.range x Lib.IntTypes.U32 \/ Lib.IntTypes.range x Lib.IntTypes.U64 \/ Lib.IntTypes.range x Lib.IntTypes.U64 \/ Lib.IntTypes.range x Lib.IntTypes.U32 }
{ "end_col": 45, "end_line": 31, "start_col": 35, "start_line": 29 }
Prims.Tot
val update: a:sha2_alg -> update_t a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let update = update_pre
val update: a:sha2_alg -> update_t a let update =
false
null
false
update_pre
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.SHA2.update_pre" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. 
t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0)) [@"opaque_to_smt"] let ws_pre = ws_pre_ (* Full shuffling function *) let shuffle_pre (a:sha2_alg) (hash:words_state a) (block:block_w a): Tot (words_state a) = let ws = ws_pre a block in let k = k0 a in Lib.LoopCombinators.repeati (size_k_w a) (fun i h -> shuffle_core_pre a k.[i] ws.[i] h) hash [@"opaque_to_smt"] let shuffle = shuffle_pre let init a = h0 a let update_pre (a:sha2_alg) (hash:words_state a) (block:bytes{S.length block = block_length a}): Tot (words_state a) = let block_w = words_of_bytes a #(block_word_length a) block in let hash_1 = shuffle a hash block_w in Spec.Loops.seq_map2 ( +. ) (hash <: Lib.Sequence.lseq (word a) (state_word_length a)) hash_1
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val update: a:sha2_alg -> update_t a
[]
Spec.SHA2.update
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> Spec.Hash.Definitions.update_t a
{ "end_col": 23, "end_line": 220, "start_col": 13, "start_line": 220 }
Prims.Tot
val op_Hat_Dot: #a: sha2_alg -> word a -> word a -> word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ( ^. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC
val op_Hat_Dot: #a: sha2_alg -> word a -> word a -> word a let op_Hat_Dot (#a: sha2_alg) : word a -> word a -> word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Lib.IntTypes.op_Hat_Dot", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Spec.Hash.Definitions.word" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op_Hat_Dot: #a: sha2_alg -> word a -> word a -> word a
[]
Spec.SHA2.op_Hat_Dot
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.word a -> _: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 43, "end_line": 79, "start_col": 2, "start_line": 77 }
Prims.Tot
val ws_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < size_k_w a}) (ws: k_w a) : k_w a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws
val ws_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < size_k_w a}) (ws: k_w a) : k_w a let ws_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < size_k_w a}) (ws: k_w a) : k_w a =
false
null
false
if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.block_w", "Prims.nat", "Prims.b2t", "Prims.op_LessThan", "Spec.SHA2.size_k_w", "Spec.SHA2.k_w", "Spec.Hash.Definitions.block_word_length", "Spec.SHA2.ws0_pre_inner", "Prims.bool", "Spec.SHA2.wsi_pre_inner" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. t16)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val ws_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < size_k_w a}) (ws: k_w a) : k_w a
[]
Spec.SHA2.ws_pre_inner
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> block: Spec.SHA2.block_w a -> i: Prims.nat{i < Spec.SHA2.size_k_w a} -> ws: Spec.SHA2.k_w a -> Spec.SHA2.k_w a
{ "end_col": 26, "end_line": 195, "start_col": 4, "start_line": 192 }
Prims.Tot
val op_Amp_Dot: #a: sha2_alg -> word a -> word a -> word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC
val op_Amp_Dot: #a: sha2_alg -> word a -> word a -> word a let op_Amp_Dot (#a: sha2_alg) : word a -> word a -> word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Lib.IntTypes.op_Amp_Dot", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Spec.Hash.Definitions.word" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op_Amp_Dot: #a: sha2_alg -> word a -> word a -> word a
[]
Spec.SHA2.op_Amp_Dot
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.word a -> _: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 43, "end_line": 86, "start_col": 2, "start_line": 84 }
Prims.Tot
val op_Plus_Dot: #a: sha2_alg -> word a -> word a -> word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC
val op_Plus_Dot: #a: sha2_alg -> word a -> word a -> word a let op_Plus_Dot (#a: sha2_alg) : word a -> word a -> word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Lib.IntTypes.op_Plus_Dot", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Spec.Hash.Definitions.word" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op_Plus_Dot: #a: sha2_alg -> word a -> word a -> word a
[]
Spec.SHA2.op_Plus_Dot
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.word a -> _: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 43, "end_line": 73, "start_col": 2, "start_line": 71 }
Prims.Tot
val to_word (a: sha2_alg) (n: nat{n < pow2 (word_n a)}) : word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n
val to_word (a: sha2_alg) (n: nat{n < pow2 (word_n a)}) : word a let to_word (a: sha2_alg) (n: nat{n < pow2 (word_n a)}) : word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Prims.nat", "Prims.b2t", "Prims.op_LessThan", "Prims.pow2", "Spec.SHA2.word_n", "Lib.IntTypes.u32", "Lib.IntTypes.u64", "Spec.Hash.Definitions.word" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val to_word (a: sha2_alg) (n: nat{n < pow2 (word_n a)}) : word a
[]
Spec.SHA2.to_word
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> n: Prims.nat{n < Prims.pow2 (Spec.SHA2.word_n a)} -> Spec.Hash.Definitions.word a
{ "end_col": 32, "end_line": 27, "start_col": 2, "start_line": 25 }
Prims.Tot
val h0 (a: sha2_alg) : Tot (words_state a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512
val h0 (a: sha2_alg) : Tot (words_state a) let h0: a: sha2_alg -> Tot (words_state a) =
false
null
false
function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.Constants.h224", "Spec.SHA2.Constants.h256", "Spec.SHA2.Constants.h384", "Spec.SHA2.Constants.h512", "Spec.Hash.Definitions.words_state" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. (op0 a).e5)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val h0 (a: sha2_alg) : Tot (words_state a)
[]
Spec.SHA2.h0
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> Spec.Hash.Definitions.words_state a
{ "end_col": 22, "end_line": 141, "start_col": 44, "start_line": 137 }
Prims.Tot
val ws0_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < block_word_length a}) (ws: k_w a) : k_w a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i)
val ws0_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < block_word_length a}) (ws: k_w a) : k_w a let ws0_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < block_word_length a}) (ws: k_w a) : k_w a =
false
null
false
Seq.upd ws i (Seq.index block i)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.block_w", "Prims.nat", "Prims.b2t", "Prims.op_LessThan", "Spec.Hash.Definitions.block_word_length", "Spec.SHA2.k_w", "FStar.Seq.Base.upd", "Spec.Hash.Definitions.word", "FStar.Seq.Base.index" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val ws0_pre_inner (a: sha2_alg) (block: block_w a) (i: nat{i < block_word_length a}) (ws: k_w a) : k_w a
[]
Spec.SHA2.ws0_pre_inner
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> block: Spec.SHA2.block_w a -> i: Prims.nat{i < Spec.Hash.Definitions.block_word_length a} -> ws: Spec.SHA2.k_w a -> Spec.SHA2.k_w a
{ "end_col": 38, "end_line": 180, "start_col": 6, "start_line": 180 }
Prims.Tot
val wsi_pre_inner (a: sha2_alg) (i: nat{i >= block_word_length a /\ i < size_k_w a}) (ws: k_w a) : k_w a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. t16)
val wsi_pre_inner (a: sha2_alg) (i: nat{i >= block_word_length a /\ i < size_k_w a}) (ws: k_w a) : k_w a let wsi_pre_inner (a: sha2_alg) (i: nat{i >= block_word_length a /\ i < size_k_w a}) (ws: k_w a) : k_w a =
false
null
false
let t16 = ws.[ i - 16 ] in let t15 = ws.[ i - 15 ] in let t7 = ws.[ i - 7 ] in let t2 = ws.[ i - 2 ] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. t16)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Prims.nat", "Prims.l_and", "Prims.b2t", "Prims.op_GreaterThanOrEqual", "Spec.Hash.Definitions.block_word_length", "Prims.op_LessThan", "Spec.SHA2.size_k_w", "Spec.SHA2.k_w", "FStar.Seq.Base.upd", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Plus_Dot", "Spec.SHA2._sigma0", "Spec.SHA2._sigma1", "Spec.SHA2.op_String_Access", "Prims.op_Subtraction" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val wsi_pre_inner (a: sha2_alg) (i: nat{i >= block_word_length a /\ i < size_k_w a}) (ws: k_w a) : k_w a
[]
Spec.SHA2.wsi_pre_inner
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> i: Prims.nat{i >= Spec.Hash.Definitions.block_word_length a /\ i < Spec.SHA2.size_k_w a} -> ws: Spec.SHA2.k_w a -> Spec.SHA2.k_w a
{ "end_col": 42, "end_line": 189, "start_col": 103, "start_line": 182 }
Prims.Tot
val op_Greater_Greater_Dot: #a: sha2_alg -> word a -> shiftval (word_t a) -> word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC
val op_Greater_Greater_Dot: #a: sha2_alg -> word a -> shiftval (word_t a) -> word a let op_Greater_Greater_Dot (#a: sha2_alg) : word a -> shiftval (word_t a) -> word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Lib.IntTypes.op_Greater_Greater_Dot", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Spec.Hash.Definitions.word", "Lib.IntTypes.shiftval", "Spec.Hash.Definitions.word_t" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op_Greater_Greater_Dot: #a: sha2_alg -> word a -> shiftval (word_t a) -> word a
[]
Spec.SHA2.op_Greater_Greater_Dot
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.word a -> _: Lib.IntTypes.shiftval (Spec.Hash.Definitions.word_t a) -> Spec.Hash.Definitions.word a
{ "end_col": 44, "end_line": 104, "start_col": 2, "start_line": 102 }
Prims.Tot
val op_Greater_Greater_Greater_Dot: #a: sha2_alg -> word a -> rotval (word_t a) -> word a
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC
val op_Greater_Greater_Greater_Dot: #a: sha2_alg -> word a -> rotval (word_t a) -> word a let op_Greater_Greater_Greater_Dot (#a: sha2_alg) : word a -> rotval (word_t a) -> word a =
false
null
false
match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Lib.IntTypes.op_Greater_Greater_Greater_Dot", "Lib.IntTypes.U32", "Lib.IntTypes.SEC", "Lib.IntTypes.U64", "Spec.Hash.Definitions.word", "Lib.IntTypes.rotval", "Spec.Hash.Definitions.word_t" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val op_Greater_Greater_Greater_Dot: #a: sha2_alg -> word a -> rotval (word_t a) -> word a
[]
Spec.SHA2.op_Greater_Greater_Greater_Dot
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.Hash.Definitions.word a -> _: Lib.IntTypes.rotval (Spec.Hash.Definitions.word_t a) -> Spec.Hash.Definitions.word a
{ "end_col": 45, "end_line": 98, "start_col": 2, "start_line": 96 }
Prims.Tot
val k0 (a: sha2_alg) : Tot (m: S.seq (word a) {S.length m = size_k_w a})
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512
val k0 (a: sha2_alg) : Tot (m: S.seq (word a) {S.length m = size_k_w a}) let k0: a: sha2_alg -> Tot (m: S.seq (word a) {S.length m = size_k_w a}) =
false
null
false
function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.SHA2.Constants.k224_256", "Spec.SHA2.Constants.k384_512", "FStar.Seq.Base.seq", "Spec.Hash.Definitions.word", "Prims.b2t", "Prims.op_Equality", "Prims.nat", "FStar.Seq.Base.length", "Spec.SHA2.size_k_w" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val k0 (a: sha2_alg) : Tot (m: S.seq (word a) {S.length m = size_k_w a})
[]
Spec.SHA2.k0
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> m: FStar.Seq.Base.seq (Spec.Hash.Definitions.word a) {FStar.Seq.Base.length m = Spec.SHA2.size_k_w a}
{ "end_col": 26, "end_line": 147, "start_col": 73, "start_line": 143 }
Prims.Tot
val shuffle_core_pre_ (a: sha2_alg) (k_t ws_t: word a) (hash: words_state a) : Tot (words_state a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l
val shuffle_core_pre_ (a: sha2_alg) (k_t ws_t: word a) (hash: words_state a) : Tot (words_state a) let shuffle_core_pre_ (a: sha2_alg) (k_t ws_t: word a) (hash: words_state a) : Tot (words_state a) =
false
null
false
assert (7 <= S.length hash); let a0 = hash.[ 0 ] in let b0 = hash.[ 1 ] in let c0 = hash.[ 2 ] in let d0 = hash.[ 3 ] in let e0 = hash.[ 4 ] in let f0 = hash.[ 5 ] in let g0 = hash.[ 6 ] in let h0 = hash.[ 7 ] in assert (S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0] in assert_norm (List.Tot.length l = 8); S.seq_of_list l
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.Hash.Definitions.words_state", "FStar.Seq.Properties.seq_of_list", "Prims.unit", "FStar.Pervasives.assert_norm", "Prims.b2t", "Prims.op_Equality", "Prims.int", "FStar.List.Tot.Base.length", "Prims.list", "Prims.Cons", "Spec.SHA2.op_Plus_Dot", "Prims.Nil", "Spec.SHA2._Sigma0", "Spec.SHA2._Maj", "Spec.SHA2._Sigma1", "Spec.SHA2._Ch", "Prims._assert", "Prims.nat", "FStar.Seq.Base.length", "Spec.SHA2.k0", "Spec.SHA2.size_k_w", "Spec.SHA2.op_String_Access", "Prims.op_LessThanOrEqual" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val shuffle_core_pre_ (a: sha2_alg) (k_t ws_t: word a) (hash: words_state a) : Tot (words_state a)
[]
Spec.SHA2.shuffle_core_pre_
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> k_t: Spec.Hash.Definitions.word a -> ws_t: Spec.Hash.Definitions.word a -> hash: Spec.Hash.Definitions.words_state a -> Spec.Hash.Definitions.words_state a
{ "end_col": 17, "end_line": 170, "start_col": 7, "start_line": 154 }
Prims.Tot
val update_pre (a: sha2_alg) (hash: words_state a) (block: bytes{S.length block = block_length a}) : Tot (words_state a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let update_pre (a:sha2_alg) (hash:words_state a) (block:bytes{S.length block = block_length a}): Tot (words_state a) = let block_w = words_of_bytes a #(block_word_length a) block in let hash_1 = shuffle a hash block_w in Spec.Loops.seq_map2 ( +. ) (hash <: Lib.Sequence.lseq (word a) (state_word_length a)) hash_1
val update_pre (a: sha2_alg) (hash: words_state a) (block: bytes{S.length block = block_length a}) : Tot (words_state a) let update_pre (a: sha2_alg) (hash: words_state a) (block: bytes{S.length block = block_length a}) : Tot (words_state a) =
false
null
false
let block_w = words_of_bytes a #(block_word_length a) block in let hash_1 = shuffle a hash block_w in Spec.Loops.seq_map2 ( +. ) (hash <: Lib.Sequence.lseq (word a) (state_word_length a)) hash_1
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.words_state", "Spec.Hash.Definitions.bytes", "Prims.b2t", "Prims.op_Equality", "Prims.int", "Prims.l_or", "Prims.op_GreaterThanOrEqual", "Prims.op_disEquality", "FStar.Seq.Base.length", "Lib.IntTypes.uint8", "Spec.Hash.Definitions.block_length", "Spec.Loops.seq_map2", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Plus_Dot", "Lib.Sequence.lseq", "Spec.Hash.Definitions.state_word_length", "Spec.SHA2.shuffle", "Spec.Hash.Definitions.block_word_length", "Spec.Hash.Definitions.words_of_bytes" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. 
(op0 a).e5) let h0: a:sha2_alg -> Tot (words_state a) = function | SHA2_224 -> C.h224 | SHA2_256 -> C.h256 | SHA2_384 -> C.h384 | SHA2_512 -> C.h512 let k0: a:sha2_alg -> Tot (m:S.seq (word a) {S.length m = size_k_w a}) = function | SHA2_224 -> C.k224_256 | SHA2_256 -> C.k224_256 | SHA2_384 -> C.k384_512 | SHA2_512 -> C.k384_512 unfold let (.[]) = S.index (* Core shuffling function *) let shuffle_core_pre_ (a:sha2_alg) (k_t: word a) (ws_t: word a) (hash:words_state a) : Tot (words_state a) = (**) assert(7 <= S.length hash); let a0 = hash.[0] in let b0 = hash.[1] in let c0 = hash.[2] in let d0 = hash.[3] in let e0 = hash.[4] in let f0 = hash.[5] in let g0 = hash.[6] in let h0 = hash.[7] in (**) assert(S.length (k0 a) = size_k_w a); let t1 = h0 +. (_Sigma1 a e0) +. (_Ch a e0 f0 g0) +. k_t +. ws_t in let t2 = (_Sigma0 a a0) +. (_Maj a a0 b0 c0) in let l = [ t1 +. t2; a0; b0; c0; d0 +. t1; e0; f0; g0 ] in assert_norm (List.Tot.length l = 8); S.seq_of_list l [@"opaque_to_smt"] let shuffle_core_pre = shuffle_core_pre_ (* Scheduling function *) (* Incremental Version *) let ws0_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < block_word_length a}) (ws:k_w a) : k_w a = Seq.upd ws i (Seq.index block i) let wsi_pre_inner (a:sha2_alg) (i:nat{i >= block_word_length a /\ i < size_k_w a}) (ws:k_w a) : k_w a = let t16 = ws.[i - 16] in let t15 = ws.[i - 15] in let t7 = ws.[i - 7] in let t2 = ws.[i - 2] in let s1 = _sigma1 a t2 in let s0 = _sigma0 a t15 in Seq.upd ws i (s1 +. t7 +. s0 +. 
t16) let ws_pre_inner (a:sha2_alg) (block:block_w a) (i:nat{i < size_k_w a}) (ws:k_w a) : k_w a = if i < block_word_length a then ws0_pre_inner a block i ws else wsi_pre_inner a i ws let ws_pre_ (a:sha2_alg) (block:block_w a) : k_w a = Lib.LoopCombinators.repeati (size_k_w a) (ws_pre_inner a block) (Seq.create (size_k_w a) (to_word a 0)) [@"opaque_to_smt"] let ws_pre = ws_pre_ (* Full shuffling function *) let shuffle_pre (a:sha2_alg) (hash:words_state a) (block:block_w a): Tot (words_state a) = let ws = ws_pre a block in let k = k0 a in Lib.LoopCombinators.repeati (size_k_w a) (fun i h -> shuffle_core_pre a k.[i] ws.[i] h) hash [@"opaque_to_smt"] let shuffle = shuffle_pre let init a = h0 a
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val update_pre (a: sha2_alg) (hash: words_state a) (block: bytes{S.length block = block_length a}) : Tot (words_state a)
[]
Spec.SHA2.update_pre
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> hash: Spec.Hash.Definitions.words_state a -> block: Spec.Hash.Definitions.bytes {FStar.Seq.Base.length block = Spec.Hash.Definitions.block_length a} -> Spec.Hash.Definitions.words_state a
{ "end_col": 94, "end_line": 217, "start_col": 118, "start_line": 214 }
Prims.Tot
val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2)
val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) let _Sigma0 a x =
false
null
false
(x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Hat_Dot", "Spec.SHA2.op_Greater_Greater_Greater_Dot", "Spec.SHA2.__proj__Mkops__item__c0", "Spec.SHA2.op0", "Spec.SHA2.__proj__Mkops__item__c1", "Spec.SHA2.__proj__Mkops__item__c2" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a)
[]
Spec.SHA2._Sigma0
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> x: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 83, "end_line": 120, "start_col": 18, "start_line": 120 }
Prims.Tot
val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let _sigma1 a x = (x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. (op0 a).e5)
val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) let _sigma1 a x =
false
null
false
(x >>>. (op0 a).e3) ^. (x >>>. (op0 a).e4) ^. (x >>. (op0 a).e5)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Hat_Dot", "Spec.SHA2.op_Greater_Greater_Greater_Dot", "Spec.SHA2.__proj__Mkops__item__e3", "Spec.SHA2.op0", "Spec.SHA2.__proj__Mkops__item__e4", "Spec.SHA2.op_Greater_Greater_Dot", "Spec.SHA2.__proj__Mkops__item__e5" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2) inline_for_extraction val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val _sigma1: a:sha2_alg -> x:(word a) -> Tot (word a)
[]
Spec.SHA2._sigma1
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> x: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 82, "end_line": 135, "start_col": 18, "start_line": 135 }
Prims.Tot
val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5)
val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) let _Sigma1 a x =
false
null
false
(x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Hat_Dot", "Spec.SHA2.op_Greater_Greater_Greater_Dot", "Spec.SHA2.__proj__Mkops__item__c3", "Spec.SHA2.op0", "Spec.SHA2.__proj__Mkops__item__c4", "Spec.SHA2.__proj__Mkops__item__c5" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a)
[]
Spec.SHA2._Sigma1
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> x: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 83, "end_line": 125, "start_col": 18, "start_line": 125 }
Prims.Tot
val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a)
[ { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "S" }, { "abbrev": true, "full_module": "Spec.SHA2.Constants", "short_module": "C" }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let _sigma0 a x = (x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2)
val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) let _sigma0 a x =
false
null
false
(x >>>. (op0 a).e0) ^. (x >>>. (op0 a).e1) ^. (x >>. (op0 a).e2)
{ "checked_file": "Spec.SHA2.fst.checked", "dependencies": [ "Spec.SHA2.Constants.fst.checked", "Spec.Loops.fst.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.LoopCombinators.fsti.checked", "Lib.IntTypes.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked" ], "interface_file": true, "source_file": "Spec.SHA2.fst" }
[ "total" ]
[ "Spec.Hash.Definitions.sha2_alg", "Spec.Hash.Definitions.word", "Spec.SHA2.op_Hat_Dot", "Spec.SHA2.op_Greater_Greater_Greater_Dot", "Spec.SHA2.__proj__Mkops__item__e0", "Spec.SHA2.op0", "Spec.SHA2.__proj__Mkops__item__e1", "Spec.SHA2.op_Greater_Greater_Dot", "Spec.SHA2.__proj__Mkops__item__e2" ]
[]
module Spec.SHA2 open Lib.IntTypes module C = Spec.SHA2.Constants module S = FStar.Seq open Spec.Hash.Definitions (* The core compression, padding and extraction functions for all SHA2 * algorithms. *) (* Define the length of the constants. Also the number of scheduling rounds. *) inline_for_extraction let size_k_w: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 64 | SHA2_384 | SHA2_512 -> 80 inline_for_extraction let word_n: sha2_alg -> Tot nat = function | SHA2_224 | SHA2_256 -> 32 | SHA2_384 | SHA2_512 -> 64 inline_for_extraction let to_word (a:sha2_alg) (n:nat{n < pow2 (word_n a)}) : word a = match a with | SHA2_224 | SHA2_256 -> u32 n | SHA2_384 | SHA2_512 -> u64 n let v' (#a: sha2_alg) (x:word a) = match a with | SHA2_224 | SHA2_256 -> uint_v #U32 #SEC x | SHA2_384 | SHA2_512 -> uint_v #U64 #SEC x let k_w (a: sha2_alg) = m:S.seq (word a) {S.length m = size_k_w a} let block_w (a: sha2_alg) = m:S.seq (word a) {S.length m = block_word_length a} let counter = nat inline_for_extraction type ops = { c0: size_t; c1: size_t; c2: size_t; c3: size_t; c4: size_t; c5: size_t; e0: size_t; e1: size_t; e2: size_t; e3: size_t; e4: size_t; e5: size_t; } (* Definition of constants used in word functions *) inline_for_extraction let op224_256: ops = { c0 = 2ul; c1 = 13ul; c2 = 22ul; c3 = 6ul; c4 = 11ul; c5 = 25ul; e0 = 7ul; e1 = 18ul; e2 = 3ul; e3 = 17ul; e4 = 19ul; e5 = 10ul } inline_for_extraction let op384_512: ops = { c0 = 28ul; c1 = 34ul; c2 = 39ul; c3 = 14ul; c4 = 18ul; c5 = 41ul; e0 = 1ul ; e1 = 8ul; e2 = 7ul; e3 = 19ul; e4 = 61ul; e5 = 6ul } inline_for_extraction let op0: a:sha2_alg -> Tot ops = function | SHA2_224 -> op224_256 | SHA2_256 -> op224_256 | SHA2_384 -> op384_512 | SHA2_512 -> op384_512 inline_for_extraction let ( +. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( +. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( +. ) #U64 #SEC inline_for_extraction let ( ^. 
) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ^. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ^. ) #U64 #SEC inline_for_extraction let ( &. ) (#a:sha2_alg): word a -> word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( &. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( &. ) #U64 #SEC inline_for_extraction let ( ~. ) (#a:sha2_alg): word a -> word a = match a with | SHA2_224 | SHA2_256 -> ( ~. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( ~. ) #U64 #SEC inline_for_extraction let ( >>>. ) (#a:sha2_alg): word a -> rotval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>>. ) #U64 #SEC inline_for_extraction let ( >>. ) (#a:sha2_alg): word a -> shiftval (word_t a) -> word a = match a with | SHA2_224 | SHA2_256 -> ( >>. ) #U32 #SEC | SHA2_384 | SHA2_512 -> ( >>. ) #U64 #SEC (* Definition of the SHA2 word functions *) inline_for_extraction val _Ch: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Ch a x y z = (x &. y) ^. (~.x &. z) inline_for_extraction val _Maj: a:sha2_alg -> x:(word a) -> y:(word a) -> z:(word a) -> Tot (word a) inline_for_extraction let _Maj a x y z = (x &. y) ^. ((x &. z) ^. (y &. z)) inline_for_extraction val _Sigma0: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma0 a x = (x >>>. (op0 a).c0) ^. (x >>>. (op0 a).c1) ^. (x >>>. (op0 a).c2) inline_for_extraction val _Sigma1: a:sha2_alg -> x:(word a) -> Tot (word a) inline_for_extraction let _Sigma1 a x = (x >>>. (op0 a).c3) ^. (x >>>. (op0 a).c4) ^. (x >>>. (op0 a).c5) inline_for_extraction val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a)
false
false
Spec.SHA2.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val _sigma0: a:sha2_alg -> x:(word a) -> Tot (word a)
[]
Spec.SHA2._sigma0
{ "file_name": "specs/Spec.SHA2.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
a: Spec.Hash.Definitions.sha2_alg -> x: Spec.Hash.Definitions.word a -> Spec.Hash.Definitions.word a
{ "end_col": 82, "end_line": 130, "start_col": 18, "start_line": 130 }
FStar.All.ALL
[ { "abbrev": true, "full_module": "Lib.PrintSequence", "short_module": "PS" }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG.Test.Vectors", "short_module": null }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG", "short_module": null }, { "abbrev": false, "full_module": "Spec.Agile.HMAC", "short_module": null }, { "abbrev": false, "full_module": "Lib.Meta", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Seq", "short_module": null }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG", "short_module": null }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let test_vec {a; entropy_input; entropy_input_reseed; nonce; personalization_string; additional_input_reseed; additional_input_1; additional_input_2; returned_bits} = let returned_bytes_len = String.strlen returned_bits / 2 in let entropy_input_len = String.strlen entropy_input / 2 in let entropy_input_reseed_len = String.strlen entropy_input_reseed / 2 in let nonce_len = String.strlen nonce / 2 in let personalization_string_len = String.strlen personalization_string / 2 in let additional_input_reseed_len = String.strlen additional_input_reseed / 2 in let additional_input_1_len = String.strlen additional_input_1 / 2 in let additional_input_2_len = String.strlen additional_input_2 / 2 in let returned_bits_len = String.strlen returned_bits / 2 in if not (is_supported_alg a && min_length a <= entropy_input_len && entropy_input_len <= max_length && min_length a / 2 <= nonce_len && nonce_len <= max_length && personalization_string_len <= max_personalization_string_length && entropy_input_reseed_len <= max_length && additional_input_reseed_len <= max_additional_input_length && additional_input_1_len <= max_additional_input_length && additional_input_2_len <= max_additional_input_length && 0 < returned_bits_len && returned_bits_len <= max_output_length) then false else let _ = hmac_input_bound a in let st = instantiate #a (from_hex entropy_input) (from_hex nonce) (from_hex personalization_string) in let st = reseed st (from_hex entropy_input_reseed) (from_hex additional_input_reseed) in match generate st returned_bytes_len (from_hex additional_input_1) with | None -> false | Some (_, st) -> match generate st returned_bytes_len (from_hex additional_input_2) with | None -> false | Some (out, st) -> PS.print_compare true returned_bytes_len (from_hex returned_bits) out
let test_vec { a = a ; entropy_input = entropy_input ; entropy_input_reseed = entropy_input_reseed ; nonce = nonce ; personalization_string = personalization_string ; additional_input_reseed = additional_input_reseed ; additional_input_1 = additional_input_1 ; additional_input_2 = additional_input_2 ; returned_bits = returned_bits } =
true
null
false
let returned_bytes_len = String.strlen returned_bits / 2 in let entropy_input_len = String.strlen entropy_input / 2 in let entropy_input_reseed_len = String.strlen entropy_input_reseed / 2 in let nonce_len = String.strlen nonce / 2 in let personalization_string_len = String.strlen personalization_string / 2 in let additional_input_reseed_len = String.strlen additional_input_reseed / 2 in let additional_input_1_len = String.strlen additional_input_1 / 2 in let additional_input_2_len = String.strlen additional_input_2 / 2 in let returned_bits_len = String.strlen returned_bits / 2 in if not (is_supported_alg a && min_length a <= entropy_input_len && entropy_input_len <= max_length && min_length a / 2 <= nonce_len && nonce_len <= max_length && personalization_string_len <= max_personalization_string_length && entropy_input_reseed_len <= max_length && additional_input_reseed_len <= max_additional_input_length && additional_input_1_len <= max_additional_input_length && additional_input_2_len <= max_additional_input_length && 0 < returned_bits_len && returned_bits_len <= max_output_length) then false else let _ = hmac_input_bound a in let st = instantiate #a (from_hex entropy_input) (from_hex nonce) (from_hex personalization_string) in let st = reseed st (from_hex entropy_input_reseed) (from_hex additional_input_reseed) in match generate st returned_bytes_len (from_hex additional_input_1) with | None -> false | Some (_, st) -> match generate st returned_bytes_len (from_hex additional_input_2) with | None -> false | Some (out, st) -> PS.print_compare true returned_bytes_len (from_hex returned_bits) out
{ "checked_file": "Spec.HMAC_DRBG.Test.fst.checked", "dependencies": [ "Spec.HMAC_DRBG.Test.Vectors.fst.checked", "Spec.HMAC_DRBG.fsti.checked", "Spec.Agile.HMAC.fsti.checked", "prims.fst.checked", "Lib.PrintSequence.fsti.checked", "Lib.Meta.fst.checked", "Lib.IntTypes.fsti.checked", "FStar.String.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.fst.checked", "FStar.IO.fst.checked" ], "interface_file": false, "source_file": "Spec.HMAC_DRBG.Test.fst" }
[]
[ "Spec.HMAC_DRBG.Test.Vectors.vec", "Spec.HMAC_DRBG.Test.Vectors.supported_alg", "Lib.Meta.hex_string", "Prims.op_Negation", "Prims.op_AmpAmp", "Spec.HMAC_DRBG.Test.Vectors.is_supported_alg", "Prims.op_LessThanOrEqual", "Spec.HMAC_DRBG.min_length", "Spec.HMAC_DRBG.max_length", "Prims.op_Division", "Spec.HMAC_DRBG.max_personalization_string_length", "Spec.HMAC_DRBG.max_additional_input_length", "Prims.op_LessThan", "Spec.HMAC_DRBG.max_output_length", "Prims.bool", "Spec.HMAC_DRBG.generate", "Lib.Meta.from_hex", "Spec.Agile.HMAC.lbytes", "Spec.HMAC_DRBG.state", "Lib.PrintSequence.print_compare", "Spec.HMAC_DRBG.reseed", "Spec.HMAC_DRBG.instantiate", "Prims.unit", "Spec.HMAC_DRBG.hmac_input_bound", "Prims.int", "FStar.String.strlen" ]
[]
module Spec.HMAC_DRBG.Test open FStar.Seq open Lib.IntTypes open Lib.Meta open Spec.Agile.HMAC open Spec.HMAC_DRBG open Spec.HMAC_DRBG.Test.Vectors module PS = Lib.PrintSequence #set-options "--max_fuel 1 --max_ifuel 1 --z3rlimit 50" let test_vec {a; entropy_input; entropy_input_reseed; nonce; personalization_string; additional_input_reseed; additional_input_1; additional_input_2;
false
false
Spec.HMAC_DRBG.Test.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val test_vec : _: Spec.HMAC_DRBG.Test.Vectors.vec -> FStar.All.ALL Prims.bool
[]
Spec.HMAC_DRBG.Test.test_vec
{ "file_name": "specs/tests/Spec.HMAC_DRBG.Test.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Spec.HMAC_DRBG.Test.Vectors.vec -> FStar.All.ALL Prims.bool
{ "end_col": 95, "end_line": 55, "start_col": 1, "start_line": 19 }
FStar.All.ALL
[ { "abbrev": true, "full_module": "Lib.PrintSequence", "short_module": "PS" }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG.Test.Vectors", "short_module": null }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG", "short_module": null }, { "abbrev": false, "full_module": "Spec.Agile.HMAC", "short_module": null }, { "abbrev": false, "full_module": "Lib.Meta", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Seq", "short_module": null }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG", "short_module": null }, { "abbrev": false, "full_module": "Spec.HMAC_DRBG", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let test () = let result = List.for_all test_vec test_vectors in if result then begin IO.print_string "\n\n[HMAC-DRBG] PASS\n"; true end else begin IO.print_string "\n\n[HMAC-DRBG] FAIL\n"; false end
let test () =
true
null
false
let result = List.for_all test_vec test_vectors in if result then (IO.print_string "\n\n[HMAC-DRBG] PASS\n"; true) else (IO.print_string "\n\n[HMAC-DRBG] FAIL\n"; false)
{ "checked_file": "Spec.HMAC_DRBG.Test.fst.checked", "dependencies": [ "Spec.HMAC_DRBG.Test.Vectors.fst.checked", "Spec.HMAC_DRBG.fsti.checked", "Spec.Agile.HMAC.fsti.checked", "prims.fst.checked", "Lib.PrintSequence.fsti.checked", "Lib.Meta.fst.checked", "Lib.IntTypes.fsti.checked", "FStar.String.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.fst.checked", "FStar.IO.fst.checked" ], "interface_file": false, "source_file": "Spec.HMAC_DRBG.Test.fst" }
[]
[ "Prims.unit", "Prims.bool", "FStar.IO.print_string", "FStar.List.for_all", "Spec.HMAC_DRBG.Test.Vectors.vec", "Spec.HMAC_DRBG.Test.test_vec", "Spec.HMAC_DRBG.Test.Vectors.test_vectors" ]
[]
module Spec.HMAC_DRBG.Test open FStar.Seq open Lib.IntTypes open Lib.Meta open Spec.Agile.HMAC open Spec.HMAC_DRBG open Spec.HMAC_DRBG.Test.Vectors module PS = Lib.PrintSequence #set-options "--max_fuel 1 --max_ifuel 1 --z3rlimit 50" let test_vec {a; entropy_input; entropy_input_reseed; nonce; personalization_string; additional_input_reseed; additional_input_1; additional_input_2; returned_bits} = let returned_bytes_len = String.strlen returned_bits / 2 in let entropy_input_len = String.strlen entropy_input / 2 in let entropy_input_reseed_len = String.strlen entropy_input_reseed / 2 in let nonce_len = String.strlen nonce / 2 in let personalization_string_len = String.strlen personalization_string / 2 in let additional_input_reseed_len = String.strlen additional_input_reseed / 2 in let additional_input_1_len = String.strlen additional_input_1 / 2 in let additional_input_2_len = String.strlen additional_input_2 / 2 in let returned_bits_len = String.strlen returned_bits / 2 in if not (is_supported_alg a && min_length a <= entropy_input_len && entropy_input_len <= max_length && min_length a / 2 <= nonce_len && nonce_len <= max_length && personalization_string_len <= max_personalization_string_length && entropy_input_reseed_len <= max_length && additional_input_reseed_len <= max_additional_input_length && additional_input_1_len <= max_additional_input_length && additional_input_2_len <= max_additional_input_length && 0 < returned_bits_len && returned_bits_len <= max_output_length) then false else let _ = hmac_input_bound a in let st = instantiate #a (from_hex entropy_input) (from_hex nonce) (from_hex personalization_string) in let st = reseed st (from_hex entropy_input_reseed) (from_hex additional_input_reseed) in match generate st returned_bytes_len (from_hex additional_input_1) with | None -> false | Some (_, st) -> match generate st returned_bytes_len (from_hex additional_input_2) with | None -> false | Some (out, st) -> PS.print_compare true returned_bytes_len 
(from_hex returned_bits) out
false
false
Spec.HMAC_DRBG.Test.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 1, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val test : _: Prims.unit -> FStar.All.ALL Prims.bool
[]
Spec.HMAC_DRBG.Test.test
{ "file_name": "specs/tests/Spec.HMAC_DRBG.Test.fst", "git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e", "git_url": "https://github.com/hacl-star/hacl-star.git", "project_name": "hacl-star" }
_: Prims.unit -> FStar.All.ALL Prims.bool
{ "end_col": 7, "end_line": 68, "start_col": 13, "start_line": 57 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x
let right_unitality_lemma (m: Type) (u: m) (mult: (m -> m -> m)) =
false
null
false
forall (x: m). x `mult` u == x
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.l_Forall", "Prims.eq2", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *)
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val right_unitality_lemma : m: Type -> u4: m -> mult: (_: m -> _: m -> m) -> Prims.logical
[]
FStar.Algebra.Monoid.right_unitality_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: Type -> u4: m -> mult: (_: m -> _: m -> m) -> Prims.logical
{ "end_col": 31, "end_line": 30, "start_col": 2, "start_line": 30 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). x `mult` y `mult` z == x `mult` (y `mult` z)
let associativity_lemma (m: Type) (mult: (m -> m -> m)) =
false
null
false
forall (x: m) (y: m) (z: m). (x `mult` y) `mult` z == x `mult` (y `mult` z)
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.l_Forall", "Prims.eq2", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val associativity_lemma : m: Type -> mult: (_: m -> _: m -> m) -> Prims.logical
[]
FStar.Algebra.Monoid.associativity_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: Type -> mult: (_: m -> _: m -> m) -> Prims.logical
{ "end_col": 64, "end_line": 36, "start_col": 2, "start_line": 36 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x
let left_unitality_lemma (m: Type) (u: m) (mult: (m -> m -> m)) =
false
null
false
forall (x: m). u `mult` x == x
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.l_Forall", "Prims.eq2", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val left_unitality_lemma : m: Type -> u7: m -> mult: (_: m -> _: m -> m) -> Prims.logical
[]
FStar.Algebra.Monoid.left_unitality_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: Type -> u7: m -> mult: (_: m -> _: m -> m) -> Prims.logical
{ "end_col": 31, "end_line": 33, "start_col": 2, "start_line": 33 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb
let monoid_morphism_unit_lemma (#a #b: Type) (f: (a -> b)) (ma: monoid a) (mb: monoid b) =
false
null
false
f (Monoid?.unit ma) == Monoid?.unit mb
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.monoid", "Prims.eq2", "FStar.Algebra.Monoid.__proj__Monoid__item__unit", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *)
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val monoid_morphism_unit_lemma : f: (_: a -> b) -> ma: FStar.Algebra.Monoid.monoid a -> mb: FStar.Algebra.Monoid.monoid b -> Prims.logical
[]
FStar.Algebra.Monoid.monoid_morphism_unit_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
f: (_: a -> b) -> ma: FStar.Algebra.Monoid.monoid a -> mb: FStar.Algebra.Monoid.monoid b -> Prims.logical
{ "end_col": 40, "end_line": 147, "start_col": 2, "start_line": 147 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y)
let monoid_morphism_mult_lemma (#a #b: Type) (f: (a -> b)) (ma: monoid a) (mb: monoid b) =
false
null
false
forall (x: a) (y: a). Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y)
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.monoid", "Prims.l_Forall", "Prims.eq2", "FStar.Algebra.Monoid.__proj__Monoid__item__mult", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val monoid_morphism_mult_lemma : f: (_: a -> b) -> ma: FStar.Algebra.Monoid.monoid a -> mb: FStar.Algebra.Monoid.monoid b -> Prims.logical
[]
FStar.Algebra.Monoid.monoid_morphism_mult_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
f: (_: a -> b) -> ma: FStar.Algebra.Monoid.monoid a -> mb: FStar.Algebra.Monoid.monoid b -> Prims.logical
{ "end_col": 72, "end_line": 150, "start_col": 2, "start_line": 150 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let unit_act_lemma (m a:Type) (u:m) (act:m -> a -> a) = forall (y:a). u `act` y == y
let unit_act_lemma (m a: Type) (u: m) (act: (m -> a -> a)) =
false
null
false
forall (y: a). u `act` y == y
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.l_Forall", "Prims.eq2", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). 
Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = | MonoidMorphism : unit:squash (monoid_morphism_unit_lemma f ma mb) -> mult:squash (monoid_morphism_mult_lemma f ma mb) -> monoid_morphism f ma mb let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) = MonoidMorphism () () let embed_nat_int (n:nat) : int = n let _ = intro_monoid_morphism embed_nat_int nat_plus_monoid int_plus_monoid let neg (p:prop) : prop = ~p let _ = assert (neg True <==> False) ; PropExt.apply (neg True) False ; let mult_lemma_helper (p q:prop) : Lemma (neg (p /\ q) == (neg p \/ neg q)) = assert (neg (p /\ q) <==> (neg p \/ neg q)) ; PropExt.apply (neg (p /\ q)) (neg p \/ neg q) in forall_intro_2 mult_lemma_helper ; intro_monoid_morphism neg conjunction_monoid disjunction_monoid let _ = assert (neg False <==> True) ; PropExt.apply (neg False) True ; let mult_lemma_helper (p q:prop) : Lemma (neg (p \/ q) == (neg p /\ neg q)) = assert (neg (p \/ q) <==> (neg p /\ neg q)) ; PropExt.apply (neg (p \/ q)) (neg p /\ neg q) in forall_intro_2 mult_lemma_helper ; intro_monoid_morphism neg disjunction_monoid conjunction_monoid (* Definition of a left action *) let mult_act_lemma (m a:Type) (mult:m -> m -> m) (act:m -> a -> a) = forall (x x':m) (y:a). (x `mult` x') `act` y == x `act` (x' `act` y)
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val unit_act_lemma : m: Type -> a: Type -> u85: m -> act: (_: m -> _: a -> a) -> Prims.logical
[]
FStar.Algebra.Monoid.unit_act_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: Type -> a: Type -> u85: m -> act: (_: m -> _: a -> a) -> Prims.logical
{ "end_col": 30, "end_line": 195, "start_col": 2, "start_line": 195 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let mult_act_lemma (m a:Type) (mult:m -> m -> m) (act:m -> a -> a) = forall (x x':m) (y:a). (x `mult` x') `act` y == x `act` (x' `act` y)
let mult_act_lemma (m a: Type) (mult: (m -> m -> m)) (act: (m -> a -> a)) =
false
null
false
forall (x: m) (x': m) (y: a). (x `mult` x') `act` y == x `act` (x' `act` y)
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.l_Forall", "Prims.eq2", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). 
Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = | MonoidMorphism : unit:squash (monoid_morphism_unit_lemma f ma mb) -> mult:squash (monoid_morphism_mult_lemma f ma mb) -> monoid_morphism f ma mb let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) = MonoidMorphism () () let embed_nat_int (n:nat) : int = n let _ = intro_monoid_morphism embed_nat_int nat_plus_monoid int_plus_monoid let neg (p:prop) : prop = ~p let _ = assert (neg True <==> False) ; PropExt.apply (neg True) False ; let mult_lemma_helper (p q:prop) : Lemma (neg (p /\ q) == (neg p \/ neg q)) = assert (neg (p /\ q) <==> (neg p \/ neg q)) ; PropExt.apply (neg (p /\ q)) (neg p \/ neg q) in forall_intro_2 mult_lemma_helper ; intro_monoid_morphism neg conjunction_monoid disjunction_monoid let _ = assert (neg False <==> True) ; PropExt.apply (neg False) True ; let mult_lemma_helper (p q:prop) : Lemma (neg (p \/ q) == (neg p /\ neg q)) = assert (neg (p \/ q) <==> (neg p /\ neg q)) ; PropExt.apply (neg (p \/ q)) (neg p /\ neg q) in forall_intro_2 mult_lemma_helper ; intro_monoid_morphism neg disjunction_monoid conjunction_monoid (* Definition of a left action *)
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val mult_act_lemma : m: Type -> a: Type -> mult: (_: m -> _: m -> m) -> act: (_: m -> _: a -> a) -> Prims.logical
[]
FStar.Algebra.Monoid.mult_act_lemma
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: Type -> a: Type -> mult: (_: m -> _: m -> m) -> act: (_: m -> _: a -> a) -> Prims.logical
{ "end_col": 70, "end_line": 192, "start_col": 2, "start_line": 192 }
Prims.Tot
val embed_nat_int (n: nat) : int
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let embed_nat_int (n:nat) : int = n
val embed_nat_int (n: nat) : int let embed_nat_int (n: nat) : int =
false
null
false
n
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.nat", "Prims.int" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = | MonoidMorphism : unit:squash (monoid_morphism_unit_lemma f ma mb) -> mult:squash (monoid_morphism_mult_lemma f ma mb) -> monoid_morphism f ma mb let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) = MonoidMorphism () ()
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val embed_nat_int (n: nat) : int
[]
FStar.Algebra.Monoid.embed_nat_int
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
n: Prims.nat -> Prims.int
{ "end_col": 35, "end_line": 165, "start_col": 34, "start_line": 165 }
Prims.Tot
val neg (p: prop) : prop
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let neg (p:prop) : prop = ~p
val neg (p: prop) : prop let neg (p: prop) : prop =
false
null
false
~p
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "Prims.prop", "Prims.l_not" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = | MonoidMorphism : unit:squash (monoid_morphism_unit_lemma f ma mb) -> mult:squash (monoid_morphism_mult_lemma f ma mb) -> monoid_morphism f ma mb let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) = MonoidMorphism () () let embed_nat_int (n:nat) : int = n let _ = intro_monoid_morphism embed_nat_int nat_plus_monoid int_plus_monoid
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val neg (p: prop) : prop
[]
FStar.Algebra.Monoid.neg
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
p: Prims.prop -> Prims.prop
{ "end_col": 28, "end_line": 168, "start_col": 26, "start_line": 168 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let left_action_morphism (#a #b #ma #mb:Type) (f:a -> b) (* mf ought to be a monoid morphism but we don't use this fact in the property *) (mf: ma -> mb) (#mma:monoid ma) (#mmb:monoid mb) (la:left_action mma a) (lb:left_action mmb b) = forall (g:ma) (x:a). LAct?.act lb (mf g) (f x) == f (LAct?.act la g x)
let left_action_morphism (#a #b #ma #mb: Type) (f: (a -> b)) (mf: (ma -> mb)) (#mma: monoid ma) (#mmb: monoid mb) (la: left_action mma a) (lb: left_action mmb b) =
false
null
false
forall (g: ma) (x: a). LAct?.act lb (mf g) (f x) == f (LAct?.act la g x)
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.monoid", "FStar.Algebra.Monoid.left_action", "Prims.l_Forall", "Prims.eq2", "FStar.Algebra.Monoid.__proj__LAct__item__act", "Prims.logical" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). 
Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = | MonoidMorphism : unit:squash (monoid_morphism_unit_lemma f ma mb) -> mult:squash (monoid_morphism_mult_lemma f ma mb) -> monoid_morphism f ma mb let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) = MonoidMorphism () () let embed_nat_int (n:nat) : int = n let _ = intro_monoid_morphism embed_nat_int nat_plus_monoid int_plus_monoid let neg (p:prop) : prop = ~p let _ = assert (neg True <==> False) ; PropExt.apply (neg True) False ; let mult_lemma_helper (p q:prop) : Lemma (neg (p /\ q) == (neg p \/ neg q)) = assert (neg (p /\ q) <==> (neg p \/ neg q)) ; PropExt.apply (neg (p /\ q)) (neg p \/ neg q) in forall_intro_2 mult_lemma_helper ; intro_monoid_morphism neg conjunction_monoid disjunction_monoid let _ = assert (neg False <==> True) ; PropExt.apply (neg False) True ; let mult_lemma_helper (p q:prop) : Lemma (neg (p \/ q) == (neg p /\ neg q)) = assert (neg (p \/ q) <==> (neg p /\ neg q)) ; PropExt.apply (neg (p \/ q)) (neg p /\ neg q) in forall_intro_2 mult_lemma_helper ; intro_monoid_morphism neg disjunction_monoid conjunction_monoid (* Definition of a left action *) let mult_act_lemma (m a:Type) (mult:m -> m -> m) (act:m -> a -> a) = forall (x x':m) (y:a). (x `mult` x') `act` y == x `act` (x' `act` y) let unit_act_lemma (m a:Type) (u:m) (act:m -> a -> a) = forall (y:a). 
u `act` y == y unopteq type left_action (#m:Type) (mm:monoid m) (a:Type) = | LAct : act:(m -> a -> a) -> mult_lemma: squash (mult_act_lemma m a (Monoid?.mult mm) act) -> unit_lemma: squash (unit_act_lemma m a (Monoid?.unit mm) act) -> left_action mm a let left_action_morphism (#a #b #ma #mb:Type) (f:a -> b) (* mf ought to be a monoid morphism but we don't use this fact in the property *) (mf: ma -> mb) (#mma:monoid ma) (#mmb:monoid mb) (la:left_action mma a)
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val left_action_morphism : f: (_: a -> b) -> mf: (_: ma -> mb) -> la: FStar.Algebra.Monoid.left_action mma a -> lb: FStar.Algebra.Monoid.left_action mmb b -> Prims.logical
[]
FStar.Algebra.Monoid.left_action_morphism
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
f: (_: a -> b) -> mf: (_: ma -> mb) -> la: FStar.Algebra.Monoid.left_action mma a -> lb: FStar.Algebra.Monoid.left_action mmb b -> Prims.logical
{ "end_col": 72, "end_line": 214, "start_col": 2, "start_line": 214 }
Prims.Tot
val bool_and_monoid:monoid bool
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_
val bool_and_monoid:monoid bool let bool_and_monoid:monoid bool =
false
null
false
let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.bool", "Prims.op_AmpAmp" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val bool_and_monoid:monoid bool
[]
FStar.Algebra.Monoid.bool_and_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.bool
{ "end_col": 29, "end_line": 126, "start_col": 35, "start_line": 124 }
Prims.Tot
val bool_or_monoid:monoid bool
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_
val bool_or_monoid:monoid bool let bool_or_monoid:monoid bool =
false
null
false
let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.bool", "Prims.op_BarBar" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val bool_or_monoid:monoid bool
[]
FStar.Algebra.Monoid.bool_or_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.bool
{ "end_col": 29, "end_line": 130, "start_col": 34, "start_line": 128 }
Prims.Pure
val intro_monoid (m: Type) (u: m) (mult: (m -> m -> m)) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult))
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () ()
val intro_monoid (m: Type) (u: m) (mult: (m -> m -> m)) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) let intro_monoid (m: Type) (u: m) (mult: (m -> m -> m)) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) =
false
null
false
Monoid u mult () () ()
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[]
[ "FStar.Algebra.Monoid.Monoid", "FStar.Algebra.Monoid.monoid", "Prims.l_and", "FStar.Algebra.Monoid.right_unitality_lemma", "FStar.Algebra.Monoid.left_unitality_lemma", "FStar.Algebra.Monoid.associativity_lemma", "Prims.eq2", "FStar.Algebra.Monoid.__proj__Monoid__item__unit", "FStar.Algebra.Monoid.__proj__Monoid__item__mult" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult))
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val intro_monoid (m: Type) (u: m) (mult: (m -> m -> m)) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult))
[]
FStar.Algebra.Monoid.intro_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: Type -> u32: m -> mult: (_: m -> _: m -> m) -> Prims.Pure (FStar.Algebra.Monoid.monoid m)
{ "end_col": 24, "end_line": 54, "start_col": 2, "start_line": 54 }
Prims.Tot
val int_plus_monoid:monoid int
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let int_plus_monoid : monoid int = intro_monoid int 0 (+)
val int_plus_monoid:monoid int let int_plus_monoid:monoid int =
false
null
false
intro_monoid int 0 ( + )
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.int", "Prims.op_Addition" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val int_plus_monoid:monoid int
[]
FStar.Algebra.Monoid.int_plus_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.int
{ "end_col": 24, "end_line": 64, "start_col": 2, "start_line": 64 }
Prims.Tot
val nat_plus_monoid:monoid nat
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add
val nat_plus_monoid:monoid nat let nat_plus_monoid:monoid nat =
false
null
false
let add (x y: nat) : nat = x + y in intro_monoid nat 0 add
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.nat", "Prims.op_Addition" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *)
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val nat_plus_monoid:monoid nat
[]
FStar.Algebra.Monoid.nat_plus_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.nat
{ "end_col": 24, "end_line": 61, "start_col": 34, "start_line": 59 }
Prims.Tot
val disjunction_monoid:monoid prop
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult
val disjunction_monoid:monoid prop let disjunction_monoid:monoid prop =
false
null
false
let u:prop = singleton False in let mult (p q: prop) : prop = p \/ q in let left_unitality_helper (p: prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p); PropExt.apply (u `mult` p) p in let right_unitality_helper (p: prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p); PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3: prop) : Lemma ((p1 `mult` p2) `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert ((p1 `mult` p2) `mult` p3 <==> p1 `mult` (p2 `mult` p3)); PropExt.apply ((p1 `mult` p2) `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper; assert (right_unitality_lemma prop u mult); forall_intro left_unitality_helper; assert (left_unitality_lemma prop u mult); forall_intro_3 associativity_helper; assert (associativity_lemma prop mult); intro_monoid prop u mult
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.prop", "Prims.unit", "Prims._assert", "FStar.Algebra.Monoid.associativity_lemma", "FStar.Classical.forall_intro_3", "Prims.eq2", "FStar.Algebra.Monoid.left_unitality_lemma", "FStar.Classical.forall_intro", "FStar.Algebra.Monoid.right_unitality_lemma", "Prims.l_True", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern", "FStar.PropositionalExtensionality.apply", "Prims.l_iff", "Prims.l_or", "FStar.Pervasives.singleton", "Prims.l_False" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val disjunction_monoid:monoid prop
[]
FStar.Algebra.Monoid.disjunction_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.prop
{ "end_col": 26, "end_line": 122, "start_col": 38, "start_line": 97 }
Prims.Tot
val lift_monoid_option (#a: Type) (m: monoid a) : monoid (option a)
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult
val lift_monoid_option (#a: Type) (m: monoid a) : monoid (option a) let lift_monoid_option (#a: Type) (m: monoid a) : monoid (option a) =
false
null
false
let mult (x y: option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.monoid", "FStar.Algebra.Monoid.intro_monoid", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.Some", "FStar.Algebra.Monoid.__proj__Monoid__item__unit", "FStar.Pervasives.Native.Mktuple2", "FStar.Algebra.Monoid.__proj__Monoid__item__mult", "FStar.Pervasives.Native.None" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val lift_monoid_option (#a: Type) (m: monoid a) : monoid (option a)
[]
FStar.Algebra.Monoid.lift_monoid_option
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
m: FStar.Algebra.Monoid.monoid a -> FStar.Algebra.Monoid.monoid (FStar.Pervasives.Native.option a)
{ "end_col": 44, "end_line": 142, "start_col": 67, "start_line": 136 }
Prims.Tot
val conjunction_monoid:monoid prop
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult
val conjunction_monoid:monoid prop let conjunction_monoid:monoid prop =
false
null
false
let u:prop = singleton True in let mult (p q: prop) : prop = p /\ q in let left_unitality_helper (p: prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p); PropExt.apply (u `mult` p) p in let right_unitality_helper (p: prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p); PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3: prop) : Lemma ((p1 `mult` p2) `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert ((p1 `mult` p2) `mult` p3 <==> p1 `mult` (p2 `mult` p3)); PropExt.apply ((p1 `mult` p2) `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper; assert (right_unitality_lemma prop u mult); forall_intro left_unitality_helper; assert (left_unitality_lemma prop u mult); forall_intro_3 associativity_helper; assert (associativity_lemma prop mult); intro_monoid prop u mult
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.prop", "Prims.unit", "Prims._assert", "FStar.Algebra.Monoid.associativity_lemma", "FStar.Classical.forall_intro_3", "Prims.eq2", "FStar.Algebra.Monoid.left_unitality_lemma", "FStar.Classical.forall_intro", "FStar.Algebra.Monoid.right_unitality_lemma", "Prims.l_True", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern", "FStar.PropositionalExtensionality.apply", "Prims.l_iff", "Prims.l_and", "FStar.Pervasives.singleton" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *)
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val conjunction_monoid:monoid prop
[]
FStar.Algebra.Monoid.conjunction_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.prop
{ "end_col": 26, "end_line": 94, "start_col": 38, "start_line": 69 }
Prims.Tot
val bool_xor_monoid:monoid bool
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor
val bool_xor_monoid:monoid bool let bool_xor_monoid:monoid bool =
false
null
false
let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[ "total" ]
[ "FStar.Algebra.Monoid.intro_monoid", "Prims.bool", "Prims.op_AmpAmp", "Prims.op_BarBar", "Prims.op_Negation" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_
false
true
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val bool_xor_monoid:monoid bool
[]
FStar.Algebra.Monoid.bool_xor_monoid
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
FStar.Algebra.Monoid.monoid Prims.bool
{ "end_col": 29, "end_line": 134, "start_col": 35, "start_line": 132 }
Prims.Pure
val intro_monoid_morphism (#a #b: Type) (f: (a -> b)) (ma: monoid a) (mb: monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True))
[ { "abbrev": true, "full_module": "FStar.PropositionalExtensionality", "short_module": "PropExt" }, { "abbrev": false, "full_module": "FStar.Classical", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Algebra", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) = MonoidMorphism () ()
val intro_monoid_morphism (#a #b: Type) (f: (a -> b)) (ma: monoid a) (mb: monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) let intro_monoid_morphism (#a #b: Type) (f: (a -> b)) (ma: monoid a) (mb: monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True)) =
false
null
false
MonoidMorphism () ()
{ "checked_file": "FStar.Algebra.Monoid.fst.checked", "dependencies": [ "prims.fst.checked", "FStar.PropositionalExtensionality.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "FStar.Algebra.Monoid.fst" }
[]
[ "FStar.Algebra.Monoid.monoid", "FStar.Algebra.Monoid.MonoidMorphism", "FStar.Algebra.Monoid.monoid_morphism", "Prims.l_and", "FStar.Algebra.Monoid.monoid_morphism_unit_lemma", "FStar.Algebra.Monoid.monoid_morphism_mult_lemma", "Prims.l_True" ]
[]
(* Copyright 2008-2018 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.Algebra.Monoid open FStar.Classical module PropExt = FStar.PropositionalExtensionality (* * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality * these should go away with proper prop support * also see the comment in PropositionalExtensionality.fst *) (** Definition of a monoid *) let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). x `mult` u == x let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = forall (x:m). u `mult` x == x let associativity_lemma (m:Type) (mult:m -> m -> m) = forall (x y z:m). 
x `mult` y `mult` z == x `mult` (y `mult` z) unopteq type monoid (m:Type) = | Monoid : unit:m -> mult:(m -> m -> m) -> right_unitality:squash (right_unitality_lemma m unit mult) -> left_unitality:squash (left_unitality_lemma m unit mult) -> associativity:squash (associativity_lemma m mult) -> monoid m let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) : Pure (monoid m) (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) = Monoid u mult () () () (** Some monoid structures *) let nat_plus_monoid : monoid nat = let add (x y : nat) : nat = x + y in intro_monoid nat 0 add let int_plus_monoid : monoid int = intro_monoid int 0 (+) (* let int_mul_monoid : monoid int = *) (* intro_monoid int 1 op_Multiply *) let conjunction_monoid : monoid prop = let u : prop = singleton True in let mult (p q : prop) : prop = p /\ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let disjunction_monoid : monoid prop = let u : prop = singleton False in let mult (p q : prop) : prop = p \/ q in let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = assert ((u `mult` p) <==> p) ; PropExt.apply (u `mult` p) p in let right_unitality_helper (p:prop) : Lemma ((p 
`mult` u) == p) = assert ((p `mult` u) <==> p) ; PropExt.apply (p `mult` u) p in let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) in forall_intro right_unitality_helper ; assert (right_unitality_lemma prop u mult) ; forall_intro left_unitality_helper ; assert (left_unitality_lemma prop u mult) ; forall_intro_3 associativity_helper; assert (associativity_lemma prop mult) ; intro_monoid prop u mult let bool_and_monoid : monoid bool = let and_ b1 b2 = b1 && b2 in intro_monoid bool true and_ let bool_or_monoid : monoid bool = let or_ b1 b2 = b1 || b2 in intro_monoid bool false or_ let bool_xor_monoid : monoid bool = let xor b1 b2 = (b1 || b2) && not (b1 && b2) in intro_monoid bool false xor let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = let mult (x y:option a) = match x, y with | Some x0, Some y0 -> Some (m.mult x0 y0) | _, _ -> None in intro_monoid (option a) (Some m.unit) mult (* Definition of a morphism of monoid *) let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = f (Monoid?.unit ma) == Monoid?.unit mb let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = forall (x y:a). Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = | MonoidMorphism : unit:squash (monoid_morphism_unit_lemma f ma mb) -> mult:squash (monoid_morphism_mult_lemma f ma mb) -> monoid_morphism f ma mb let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True))
false
false
FStar.Algebra.Monoid.fst
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val intro_monoid_morphism (#a #b: Type) (f: (a -> b)) (ma: monoid a) (mb: monoid b) : Pure (monoid_morphism f ma mb) (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) (ensures (fun _ -> True))
[]
FStar.Algebra.Monoid.intro_monoid_morphism
{ "file_name": "ulib/FStar.Algebra.Monoid.fst", "git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
f: (_: a -> b) -> ma: FStar.Algebra.Monoid.monoid a -> mb: FStar.Algebra.Monoid.monoid b -> Prims.Pure (FStar.Algebra.Monoid.monoid_morphism f ma mb)
{ "end_col": 22, "end_line": 163, "start_col": 2, "start_line": 163 }
Prims.GTot
val make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot t)) : GTot Type0
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2
val make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot t)) : GTot Type0 let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot t)) : GTot Type0 =
false
null
false
forall (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}). {:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "Prims.nat", "LowParse.Bytes.bytes", "Prims.eq2", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "Prims.l_Forall", "Prims.l_imp", "FStar.Seq.Base.equal" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot t)) : GTot Type0
[]
LowParse.Spec.Combinators.make_total_constant_size_parser_precond
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
sz: Prims.nat -> t: Type -> f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> Prims.GTot t) -> Prims.GTot Type0
{ "end_col": 34, "end_line": 149, "start_col": 2, "start_line": 148 }
Prims.Tot
val fail_parser' (t: Type) : Tot (tot_bare_parser t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None
val fail_parser' (t: Type) : Tot (tot_bare_parser t) let fail_parser' (t: Type) : Tot (tot_bare_parser t) =
false
null
false
fun _ -> None
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Bytes.bytes", "FStar.Pervasives.Native.None", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.tot_bare_parser" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type)
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val fail_parser' (t: Type) : Tot (tot_bare_parser t)
[]
LowParse.Spec.Combinators.fail_parser'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
t: Type -> LowParse.Spec.Base.tot_bare_parser t
{ "end_col": 15, "end_line": 230, "start_col": 2, "start_line": 230 }
Prims.Tot
val parse_ret_kind:parser_kind
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
val parse_ret_kind:parser_kind let parse_ret_kind:parser_kind =
false
null
false
strong_parser_kind 0 0 (Some ParserKindMetadataTotal)
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.strong_parser_kind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.parser_kind_metadata_some", "LowParse.Spec.Base.ParserKindMetadataTotal" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction
false
true
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val parse_ret_kind:parser_kind
[]
LowParse.Spec.Combinators.parse_ret_kind
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
LowParse.Spec.Base.parser_kind
{ "end_col": 55, "end_line": 195, "start_col": 2, "start_line": 195 }
Prims.GTot
val and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . {:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2
val and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 let and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 =
false
null
false
forall (x1: t) (x2: t) (b1: bytes) (b2: bytes). {:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "LowParse.Spec.Combinators.and_then_cases_injective_precond", "Prims.eq2", "LowParse.Spec.Base.parse" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t')))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val and_then_cases_injective (#t #t': Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0
[]
LowParse.Spec.Combinators.and_then_cases_injective
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
p': (_: t -> LowParse.Spec.Base.bare_parser t') -> Prims.GTot Type0
{ "end_col": 10, "end_line": 308, "start_col": 2, "start_line": 306 }
Prims.GTot
val synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x'
val synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 let synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 =
false
null
false
forall (x: t1) (x': t1). {:pattern (f x); (f x')} f x == f x' ==> x == x'
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "Prims.l_Forall", "Prims.l_imp", "Prims.eq2" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val synth_injective (#t1 #t2: Type) (f: (t1 -> GTot t2)) : GTot Type0
[]
LowParse.Spec.Combinators.synth_injective
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f: (_: t1 -> Prims.GTot t2) -> Prims.GTot Type0
{ "end_col": 71, "end_line": 569, "start_col": 2, "start_line": 569 }
Prims.Tot
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail)
let parse_false_kind =
false
null
false
strong_parser_kind 0 0 (Some ParserKindMetadataFail)
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.strong_parser_kind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.parser_kind_metadata_some", "LowParse.Spec.Base.ParserKindMetadataFail" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x)
false
true
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val parse_false_kind : LowParse.Spec.Base.parser_kind
[]
LowParse.Spec.Combinators.parse_false_kind
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
LowParse.Spec.Base.parser_kind
{ "end_col": 75, "end_line": 261, "start_col": 23, "start_line": 261 }
Prims.Tot
val bare_serialize_synth (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2)) (s1: serializer p1) (g1: (t2 -> GTot t1)) : Tot (bare_serializer t2)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x)
val bare_serialize_synth (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2)) (s1: serializer p1) (g1: (t2 -> GTot t1)) : Tot (bare_serializer t2) let bare_serialize_synth (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2)) (s1: serializer p1) (g1: (t2 -> GTot t1)) : Tot (bare_serializer t2) =
false
null
false
fun (x: t2) -> s1 (g1 x)
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Bytes.bytes", "LowParse.Spec.Base.bare_serializer" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1)
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val bare_serialize_synth (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2)) (s1: serializer p1) (g1: (t2 -> GTot t1)) : Tot (bare_serializer t2)
[]
LowParse.Spec.Combinators.bare_serialize_synth
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
p1: LowParse.Spec.Base.parser k t1 -> f2: (_: t1 -> Prims.GTot t2) -> s1: LowParse.Spec.Base.serializer p1 -> g1: (_: t2 -> Prims.GTot t1) -> LowParse.Spec.Base.bare_serializer t2
{ "end_col": 26, "end_line": 676, "start_col": 2, "start_line": 676 }
Prims.Tot
val and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None
val and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t =
false
null
false
match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.parser_kind_metadata_t", "FStar.Pervasives.Native.Mktuple2", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_kind_metadata_some", "FStar.Pervasives.Native.tuple2", "FStar.Pervasives.Native.None" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? (p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t)
false
true
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t
[]
LowParse.Spec.Combinators.and_then_metadata
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
k1: LowParse.Spec.Base.parser_kind_metadata_t -> k2: LowParse.Spec.Base.parser_kind_metadata_t -> LowParse.Spec.Base.parser_kind_metadata_t
{ "end_col": 13, "end_line": 442, "start_col": 2, "start_line": 438 }
Prims.Tot
val lift_serializer' (#k: parser_kind) (#t: Type) (#f: (unit -> GTot (parser k t))) (s: (unit -> GTot (serializer (f ())))) : Tot (bare_serializer t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let lift_serializer' (#k: parser_kind) (#t: Type) (#f: unit -> GTot (parser k t)) (s: unit -> GTot (serializer (f ()))) : Tot (bare_serializer t) = fun (x: t) -> serialize (s ()) x
val lift_serializer' (#k: parser_kind) (#t: Type) (#f: (unit -> GTot (parser k t))) (s: (unit -> GTot (serializer (f ())))) : Tot (bare_serializer t) let lift_serializer' (#k: parser_kind) (#t: Type) (#f: (unit -> GTot (parser k t))) (s: (unit -> GTot (serializer (f ())))) : Tot (bare_serializer t) =
false
null
false
fun (x: t) -> serialize (s ()) x
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.parser_kind", "Prims.unit", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Spec.Base.serialize", "LowParse.Bytes.bytes", "LowParse.Spec.Base.bare_serializer" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . 
parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x) val bare_serialize_synth_correct (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Lemma (requires ( (forall (x : t2) . f2 (g1 x) == x) /\ (forall (x x' : t1) . f2 x == f2 x' ==> x == x') )) (ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) let synth_inverse (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 = (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) let synth_inverse_intro (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : Lemma (requires (forall (x : t2) . f2 (g1 x) == x)) (ensures (synth_inverse f2 g1)) = () let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma (ensures (synth_inverse f2 g1)) = Classical.forall_intro prf let synth_inverse_synth_injective_pat (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) [SMTPat (synth_inverse g f)] = assert (forall x1 x2. 
f x1 == f x2 ==> g (f x1) == g (f x2)) let synth_inverse_synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) = () let synth_inverse_synth_injective' (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f)) : Tot (squash (synth_injective f)) = () let synth_injective_synth_inverse_synth_inverse_recip (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f /\ synth_injective g)) : Tot (squash (synth_inverse f g)) = assert (forall x . g (f (g x)) == g x) val serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer (parse_synth p1 f2)) val serialize_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) : Lemma (serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)) let serialize_synth_eq' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) (y1: bytes) (q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x)) (y2: bytes) (q2: squash (y2 == serialize s1 (g1 x))) : Lemma (ensures (y1 == y2)) = serialize_synth_eq p1 f2 s1 g1 u x let serialize_tot_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer #k (tot_parse_synth p1 f2)) = serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _ val serialize_synth_upd_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: 
t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s' )) val serialize_synth_upd_bw_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s' )) (* Strengthened versions of and_then *) inline_for_extraction let synth_tagged_union_data (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (tg: tag_t) (x: refine_with_tag tag_of_data tg) : Tot data_t = x let parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Tot (parser k data_t) = parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) let parse_tagged_union_payload_and_then_cases_injective (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) = 
and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 -> parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1; parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2 ) val parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Tot (parser (and_then_kind kt k) data_t) val parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) let bare_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (k': (t: tag_t) -> Tot parser_kind) (p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (input: bytes) : GTot (option (data_t * consumed_length input)) = match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end val parse_tagged_union_eq_gen (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (#kt': parser_kind) (pt': parser kt' tag_t) (lem_pt: ( (input: bytes) 
-> Lemma (parse pt input == parse pt' input) )) (k': (t: tag_t) -> Tot parser_kind) (p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (lem_p' : ( (k: tag_t) -> (input: bytes) -> Lemma (parse (p k) input == parse (p' k) input) )) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) let tot_parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Pure (tot_parser k data_t) (requires True) (ensures (fun y -> forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x )) = tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) val tot_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) : Pure (tot_parser (and_then_kind kt k) data_t) (requires True) (ensures (fun y -> forall x . 
parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x )) let tot_parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) = parse_tagged_union_eq #kt pt tag_of_data #k p input let bare_serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Tot (bare_serializer data_t) = fun (d: data_t) -> let tg = tag_of_data d in Seq.append (st tg) (serialize (s tg) d) let seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) let seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) let bare_serialize_tagged_union_correct (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serializer_correct (parse_tagged_union pt tag_of_data p) 
(bare_serialize_tagged_union st tag_of_data s))) = (* same proof as nondep_then *) let prf (x: data_t) : Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x))) = parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x); let t = tag_of_data x in let (u: refine_with_tag tag_of_data t) = x in let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in let v1 = parse pt (serialize st t) in assert (Some? v1); parser_kind_prop_equiv kt pt; assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (_, len')) = parse pt (serialize st t) in assert (len' == Seq.length (serialize st t)); assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x)); assert (Seq.slice (serialize st t) 0 len' == st t); seq_slice_append_l (serialize st t) (serialize (s t) u); assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (Some? 
v1'); assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (x1, len1)) = v1 in let (Some (x1', len1')) = v1' in assert (x1 == x1'); assert ((len1 <: nat) == (len1' <: nat)); assert (x1 == t); assert (len1 == Seq.length (serialize st t)); assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u)); seq_slice_append_r (serialize st t) (serialize (s t) u); () in Classical.forall_intro prf val serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Pure (serializer (parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (fun _ -> True)) val serialize_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) (input: data_t) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input)) [SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)] let serialize_tot_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: tot_parser kt tag_t) (st: serializer #kt pt) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer #k (p t))) : Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == 
Some ParserStrong)) (ensures (fun _ -> True)) = serialize_ext _ (serialize_tagged_union st tag_of_data s) _ (* Dependent pairs *) inline_for_extraction let synth_dtuple2 (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: t2 x) : Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) = (| x, y |) let parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) = parse_tagged_union p1 dfst (fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) inline_for_extraction let synth_dtuple2_recip (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) : Tot (t2 x) = dsnd y val serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) : Tot (serializer (parse_dtuple2 p1 p2)) val parse_dtuple2_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None )) let bare_parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (bare_parser (dtuple2 t1 t2)) = fun b -> match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None let parse_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) 
-> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) = parse_dtuple2_eq p1 p2 b val serialize_dtuple2_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Lemma (serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)) let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : GTot bytes = serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy) let serialize_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Tot (squash ( (serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) = serialize_dtuple2_eq s1 s2 xy (* Special case for non-dependent parsing *) val nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Tot (parser (and_then_kind k1 k2) (t1 * t2)) #set-options "--z3rlimit 16" val nondep_then_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (b: bytes) : Lemma (parse (nondep_then p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse p2 b' with | Some (x2, consumed2) -> Some ((x1, x2), consumed1 + consumed2) | 
_ -> None end | _ -> None )) val tot_nondep_then (#k1: parser_kind) (#t1: Type) (p1: tot_parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: tot_parser k2 t2) : Pure (tot_parser (and_then_kind k1 k2) (t1 * t2)) (requires True) (ensures (fun y -> forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x )) let bare_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (s2: serializer p2) : Tot (bare_serializer (t1 * t2)) = fun (x: t1 * t2) -> let (x1, x2) = x in Seq.append (s1 x1) (s2 x2) val serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) : Tot (serializer (nondep_then p1 p2)) val serialize_nondep_then_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input: t1 * t2) : Lemma (serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input) val length_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input1: t1) (input2: t2) : Lemma (Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2)) val serialize_nondep_then_upd_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize 
s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y) )) val serialize_nondep_then_upd_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s' )) val serialize_nondep_then_upd_bw_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + Seq.length (serialize s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y) )) #reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'" val serialize_nondep_then_upd_bw_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_bw_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + i' + Seq.length s' <= 
Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s' )) val serialize_nondep_then_upd_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y) )) val serialize_nondep_then_upd_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ l1 + i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s' )) let serialize_nondep_then_upd_bw_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y) )) = serialize_nondep_then_upd_right s1 s2 x y let 
serialize_nondep_then_upd_bw_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_bw_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s' )) = let s2' = serialize s2 (snd x) in let j' = Seq.length s2' - i' - Seq.length s' in assert (j' + Seq.length s' <= Seq.length s2'); assert (serialize s2 y == seq_upd_seq s2' j' s'); let s = serialize (serialize_nondep_then s1 s2) x in serialize_nondep_then_upd_right_chain s1 s2 x y j' s'; assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s'); () #reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Apply a total transformation on parsed data *) let parse_strengthen_prf (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) : Tot Type = (xbytes: bytes) -> (consumed: consumed_length xbytes) -> (x: t1) -> Lemma (requires (parse p1 xbytes == Some (x, consumed))) (ensures (p2 x)) let bare_parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (bare_parser (x: t1 { p2 x } )) = fun (xbytes: bytes) -> match parse p1 xbytes with | Some (x, consumed) -> prf xbytes consumed x; let (x' : t1 { p2 x' } ) = x in Some (x', consumed) | _ -> None let bare_parse_strengthen_no_lookahead (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (no_lookahead p1 ==> no_lookahead 
(bare_parse_strengthen p1 p2 prf)) = let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) let bare_parse_strengthen_injective (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2); assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) let bare_parse_strengthen_correct (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf) /\ parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; bare_parse_strengthen_no_lookahead p1 p2 prf; bare_parse_strengthen_injective p1 p2 prf; parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf); () let parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (parser k (x: t1 { p2 x } )) = bare_parse_strengthen_correct p1 p2 prf; bare_parse_strengthen p1 p2 prf let serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : GTot bytes = serialize s input let serialize_strengthen_correct (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : Lemma (let output = serialize_strengthen' p2 prf s input in parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output)) = () let serialize_strengthen (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: 
parse_strengthen_prf p1 p2) (s: serializer p1) : Tot (serializer (parse_strengthen p1 p2 prf)) = Classical.forall_intro (serialize_strengthen_correct p2 prf s); serialize_strengthen' p2 prf s let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 = let y1 = f1 x in f2 y1 val make_total_constant_size_parser_compose (sz: nat) (t1 t2: Type) (f1: ((s: bytes {Seq.length s == sz}) -> GTot t1)) (g2: t1 -> GTot t2) : Lemma (requires ( make_total_constant_size_parser_precond sz t1 f1 /\ (forall x x' . g2 x == g2 x' ==> x == x') )) (ensures ( make_total_constant_size_parser_precond sz t1 f1 /\ make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\ (forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\ (forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input) )) (** Tot vs. Ghost *) unfold let lift_parser' (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Tot (bare_parser t) = fun (input: bytes) -> parse (f ()) input let lift_parser_correct (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Lemma (parser_kind_prop k (lift_parser' f)) = parser_kind_prop_ext k (f ()) (lift_parser' f) let lift_parser (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Tot (parser k t) = lift_parser_correct f; lift_parser' f unfold let lift_serializer' (#k: parser_kind) (#t: Type) (#f: unit -> GTot (parser k t)) (s: unit -> GTot (serializer (f ())))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 32, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val lift_serializer' (#k: parser_kind) (#t: Type) (#f: (unit -> GTot (parser k t))) (s: (unit -> GTot (serializer (f ())))) : Tot (bare_serializer t)
[]
LowParse.Spec.Combinators.lift_serializer'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
s: (_: Prims.unit -> Prims.GTot (LowParse.Spec.Base.serializer (f ()))) -> LowParse.Spec.Base.bare_serializer t
{ "end_col": 34, "end_line": 1772, "start_col": 2, "start_line": 1772 }
Prims.GTot
val bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong}) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1 -> parser k2 (t2 x))) (s2: (x: t1 -> serializer (p2 x))) (xy: dtuple2 t1 t2) : GTot bytes
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : GTot bytes = serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)
val bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong}) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1 -> parser k2 (t2 x))) (s2: (x: t1 -> serializer (p2 x))) (xy: dtuple2 t1 t2) : GTot bytes let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong}) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1 -> parser k2 (t2 x))) (s2: (x: t1 -> serializer (p2 x))) (xy: dtuple2 t1 t2) : GTot bytes =
false
null
false
(serialize s1 (dfst xy)) `Seq.append` (serialize (s2 (dfst xy)) (dsnd xy))
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "Prims.eq2", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserStrong", "Prims.dtuple2", "FStar.Seq.Base.append", "LowParse.Bytes.byte", "LowParse.Spec.Base.serialize", "FStar.Pervasives.dfst", "FStar.Pervasives.dsnd", "LowParse.Bytes.bytes" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . 
parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x) val bare_serialize_synth_correct (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Lemma (requires ( (forall (x : t2) . f2 (g1 x) == x) /\ (forall (x x' : t1) . f2 x == f2 x' ==> x == x') )) (ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) let synth_inverse (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 = (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) let synth_inverse_intro (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : Lemma (requires (forall (x : t2) . f2 (g1 x) == x)) (ensures (synth_inverse f2 g1)) = () let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma (ensures (synth_inverse f2 g1)) = Classical.forall_intro prf let synth_inverse_synth_injective_pat (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) [SMTPat (synth_inverse g f)] = assert (forall x1 x2. 
f x1 == f x2 ==> g (f x1) == g (f x2)) let synth_inverse_synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) = () let synth_inverse_synth_injective' (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f)) : Tot (squash (synth_injective f)) = () let synth_injective_synth_inverse_synth_inverse_recip (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f /\ synth_injective g)) : Tot (squash (synth_inverse f g)) = assert (forall x . g (f (g x)) == g x) val serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer (parse_synth p1 f2)) val serialize_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) : Lemma (serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)) let serialize_synth_eq' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) (y1: bytes) (q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x)) (y2: bytes) (q2: squash (y2 == serialize s1 (g1 x))) : Lemma (ensures (y1 == y2)) = serialize_synth_eq p1 f2 s1 g1 u x let serialize_tot_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer #k (tot_parse_synth p1 f2)) = serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _ val serialize_synth_upd_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: 
t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s' )) val serialize_synth_upd_bw_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s' )) (* Strengthened versions of and_then *) inline_for_extraction let synth_tagged_union_data (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (tg: tag_t) (x: refine_with_tag tag_of_data tg) : Tot data_t = x let parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Tot (parser k data_t) = parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) let parse_tagged_union_payload_and_then_cases_injective (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) = 
and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 -> parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1; parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2 ) val parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Tot (parser (and_then_kind kt k) data_t) val parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) let bare_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (k': (t: tag_t) -> Tot parser_kind) (p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (input: bytes) : GTot (option (data_t * consumed_length input)) = match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end val parse_tagged_union_eq_gen (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (#kt': parser_kind) (pt': parser kt' tag_t) (lem_pt: ( (input: bytes) 
-> Lemma (parse pt input == parse pt' input) )) (k': (t: tag_t) -> Tot parser_kind) (p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (lem_p' : ( (k: tag_t) -> (input: bytes) -> Lemma (parse (p k) input == parse (p' k) input) )) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) let tot_parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Pure (tot_parser k data_t) (requires True) (ensures (fun y -> forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x )) = tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) val tot_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) : Pure (tot_parser (and_then_kind kt k) data_t) (requires True) (ensures (fun y -> forall x . 
parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x )) let tot_parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) = parse_tagged_union_eq #kt pt tag_of_data #k p input let bare_serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Tot (bare_serializer data_t) = fun (d: data_t) -> let tg = tag_of_data d in Seq.append (st tg) (serialize (s tg) d) let seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) let seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) let bare_serialize_tagged_union_correct (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serializer_correct (parse_tagged_union pt tag_of_data p) 
(bare_serialize_tagged_union st tag_of_data s))) = (* same proof as nondep_then *) let prf (x: data_t) : Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x))) = parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x); let t = tag_of_data x in let (u: refine_with_tag tag_of_data t) = x in let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in let v1 = parse pt (serialize st t) in assert (Some? v1); parser_kind_prop_equiv kt pt; assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (_, len')) = parse pt (serialize st t) in assert (len' == Seq.length (serialize st t)); assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x)); assert (Seq.slice (serialize st t) 0 len' == st t); seq_slice_append_l (serialize st t) (serialize (s t) u); assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (Some? 
v1'); assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (x1, len1)) = v1 in let (Some (x1', len1')) = v1' in assert (x1 == x1'); assert ((len1 <: nat) == (len1' <: nat)); assert (x1 == t); assert (len1 == Seq.length (serialize st t)); assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u)); seq_slice_append_r (serialize st t) (serialize (s t) u); () in Classical.forall_intro prf val serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Pure (serializer (parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (fun _ -> True)) val serialize_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) (input: data_t) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input)) [SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)] let serialize_tot_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: tot_parser kt tag_t) (st: serializer #kt pt) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer #k (p t))) : Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == 
Some ParserStrong)) (ensures (fun _ -> True)) = serialize_ext _ (serialize_tagged_union st tag_of_data s) _ (* Dependent pairs *) inline_for_extraction let synth_dtuple2 (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: t2 x) : Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) = (| x, y |) let parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) = parse_tagged_union p1 dfst (fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) inline_for_extraction let synth_dtuple2_recip (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) : Tot (t2 x) = dsnd y val serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) : Tot (serializer (parse_dtuple2 p1 p2)) val parse_dtuple2_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None )) let bare_parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (bare_parser (dtuple2 t1 t2)) = fun b -> match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None let parse_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) 
-> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) = parse_dtuple2_eq p1 p2 b val serialize_dtuple2_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Lemma (serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)) let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2)
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong}) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1 -> parser k2 (t2 x))) (s2: (x: t1 -> serializer (p2 x))) (xy: dtuple2 t1 t2) : GTot bytes
[]
LowParse.Spec.Combinators.bare_serialize_dtuple2
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
s1: LowParse.Spec.Base.serializer p1 { Mkparser_kind'?.parser_kind_subkind k1 == FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } -> s2: (x: t1 -> LowParse.Spec.Base.serializer (p2 x)) -> xy: Prims.dtuple2 t1 t2 -> Prims.GTot LowParse.Bytes.bytes
{ "end_col": 72, "end_line": 1294, "start_col": 2, "start_line": 1294 }
Prims.GTot
val serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: (t1 -> GTot Type0)) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1{p2 input}) : GTot bytes
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : GTot bytes = serialize s input
val serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: (t1 -> GTot Type0)) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1{p2 input}) : GTot bytes let serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: (t1 -> GTot Type0)) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1{p2 input}) : GTot bytes =
false
null
false
serialize s input
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Combinators.parse_strengthen_prf", "LowParse.Spec.Base.serializer", "LowParse.Spec.Base.serialize", "LowParse.Bytes.bytes" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . 
parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x) val bare_serialize_synth_correct (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Lemma (requires ( (forall (x : t2) . f2 (g1 x) == x) /\ (forall (x x' : t1) . f2 x == f2 x' ==> x == x') )) (ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) let synth_inverse (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 = (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) let synth_inverse_intro (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : Lemma (requires (forall (x : t2) . f2 (g1 x) == x)) (ensures (synth_inverse f2 g1)) = () let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma (ensures (synth_inverse f2 g1)) = Classical.forall_intro prf let synth_inverse_synth_injective_pat (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) [SMTPat (synth_inverse g f)] = assert (forall x1 x2. 
f x1 == f x2 ==> g (f x1) == g (f x2)) let synth_inverse_synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) = () let synth_inverse_synth_injective' (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f)) : Tot (squash (synth_injective f)) = () let synth_injective_synth_inverse_synth_inverse_recip (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f /\ synth_injective g)) : Tot (squash (synth_inverse f g)) = assert (forall x . g (f (g x)) == g x) val serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer (parse_synth p1 f2)) val serialize_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) : Lemma (serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)) let serialize_synth_eq' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) (y1: bytes) (q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x)) (y2: bytes) (q2: squash (y2 == serialize s1 (g1 x))) : Lemma (ensures (y1 == y2)) = serialize_synth_eq p1 f2 s1 g1 u x let serialize_tot_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer #k (tot_parse_synth p1 f2)) = serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _ val serialize_synth_upd_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: 
t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s' )) val serialize_synth_upd_bw_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s' )) (* Strengthened versions of and_then *) inline_for_extraction let synth_tagged_union_data (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (tg: tag_t) (x: refine_with_tag tag_of_data tg) : Tot data_t = x let parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Tot (parser k data_t) = parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) let parse_tagged_union_payload_and_then_cases_injective (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) = 
and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 -> parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1; parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2 ) val parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Tot (parser (and_then_kind kt k) data_t) val parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) let bare_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (k': (t: tag_t) -> Tot parser_kind) (p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (input: bytes) : GTot (option (data_t * consumed_length input)) = match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end val parse_tagged_union_eq_gen (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (#kt': parser_kind) (pt': parser kt' tag_t) (lem_pt: ( (input: bytes) 
-> Lemma (parse pt input == parse pt' input) )) (k': (t: tag_t) -> Tot parser_kind) (p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (lem_p' : ( (k: tag_t) -> (input: bytes) -> Lemma (parse (p k) input == parse (p' k) input) )) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) let tot_parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Pure (tot_parser k data_t) (requires True) (ensures (fun y -> forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x )) = tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) val tot_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) : Pure (tot_parser (and_then_kind kt k) data_t) (requires True) (ensures (fun y -> forall x . 
parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x )) let tot_parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) = parse_tagged_union_eq #kt pt tag_of_data #k p input let bare_serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Tot (bare_serializer data_t) = fun (d: data_t) -> let tg = tag_of_data d in Seq.append (st tg) (serialize (s tg) d) let seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) let seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) let bare_serialize_tagged_union_correct (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serializer_correct (parse_tagged_union pt tag_of_data p) 
(bare_serialize_tagged_union st tag_of_data s))) = (* same proof as nondep_then *) let prf (x: data_t) : Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x))) = parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x); let t = tag_of_data x in let (u: refine_with_tag tag_of_data t) = x in let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in let v1 = parse pt (serialize st t) in assert (Some? v1); parser_kind_prop_equiv kt pt; assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (_, len')) = parse pt (serialize st t) in assert (len' == Seq.length (serialize st t)); assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x)); assert (Seq.slice (serialize st t) 0 len' == st t); seq_slice_append_l (serialize st t) (serialize (s t) u); assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (Some? 
v1'); assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (x1, len1)) = v1 in let (Some (x1', len1')) = v1' in assert (x1 == x1'); assert ((len1 <: nat) == (len1' <: nat)); assert (x1 == t); assert (len1 == Seq.length (serialize st t)); assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u)); seq_slice_append_r (serialize st t) (serialize (s t) u); () in Classical.forall_intro prf val serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Pure (serializer (parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (fun _ -> True)) val serialize_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) (input: data_t) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input)) [SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)] let serialize_tot_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: tot_parser kt tag_t) (st: serializer #kt pt) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer #k (p t))) : Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == 
Some ParserStrong)) (ensures (fun _ -> True)) = serialize_ext _ (serialize_tagged_union st tag_of_data s) _ (* Dependent pairs *) inline_for_extraction let synth_dtuple2 (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: t2 x) : Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) = (| x, y |) let parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) = parse_tagged_union p1 dfst (fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) inline_for_extraction let synth_dtuple2_recip (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) : Tot (t2 x) = dsnd y val serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) : Tot (serializer (parse_dtuple2 p1 p2)) val parse_dtuple2_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None )) let bare_parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (bare_parser (dtuple2 t1 t2)) = fun b -> match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None let parse_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) 
-> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) = parse_dtuple2_eq p1 p2 b val serialize_dtuple2_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Lemma (serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)) let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : GTot bytes = serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy) let serialize_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Tot (squash ( (serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) = serialize_dtuple2_eq s1 s2 xy (* Special case for non-dependent parsing *) val nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Tot (parser (and_then_kind k1 k2) (t1 * t2)) #set-options "--z3rlimit 16" val nondep_then_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (b: bytes) : Lemma (parse (nondep_then p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse p2 b' with | Some (x2, consumed2) -> Some ((x1, x2), consumed1 + consumed2) | 
_ -> None end | _ -> None )) val tot_nondep_then (#k1: parser_kind) (#t1: Type) (p1: tot_parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: tot_parser k2 t2) : Pure (tot_parser (and_then_kind k1 k2) (t1 * t2)) (requires True) (ensures (fun y -> forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x )) let bare_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (s2: serializer p2) : Tot (bare_serializer (t1 * t2)) = fun (x: t1 * t2) -> let (x1, x2) = x in Seq.append (s1 x1) (s2 x2) val serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) : Tot (serializer (nondep_then p1 p2)) val serialize_nondep_then_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input: t1 * t2) : Lemma (serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input) val length_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input1: t1) (input2: t2) : Lemma (Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2)) val serialize_nondep_then_upd_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize 
s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y) )) val serialize_nondep_then_upd_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s' )) val serialize_nondep_then_upd_bw_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + Seq.length (serialize s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y) )) #reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'" val serialize_nondep_then_upd_bw_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_bw_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + i' + Seq.length s' <= 
Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s' )) val serialize_nondep_then_upd_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y) )) val serialize_nondep_then_upd_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ l1 + i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s' )) let serialize_nondep_then_upd_bw_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y) )) = serialize_nondep_then_upd_right s1 s2 x y let 
serialize_nondep_then_upd_bw_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_bw_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s' )) = let s2' = serialize s2 (snd x) in let j' = Seq.length s2' - i' - Seq.length s' in assert (j' + Seq.length s' <= Seq.length s2'); assert (serialize s2 y == seq_upd_seq s2' j' s'); let s = serialize (serialize_nondep_then s1 s2) x in serialize_nondep_then_upd_right_chain s1 s2 x y j' s'; assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s'); () #reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Apply a total transformation on parsed data *) let parse_strengthen_prf (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) : Tot Type = (xbytes: bytes) -> (consumed: consumed_length xbytes) -> (x: t1) -> Lemma (requires (parse p1 xbytes == Some (x, consumed))) (ensures (p2 x)) let bare_parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (bare_parser (x: t1 { p2 x } )) = fun (xbytes: bytes) -> match parse p1 xbytes with | Some (x, consumed) -> prf xbytes consumed x; let (x' : t1 { p2 x' } ) = x in Some (x', consumed) | _ -> None let bare_parse_strengthen_no_lookahead (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (no_lookahead p1 ==> no_lookahead 
(bare_parse_strengthen p1 p2 prf)) = let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) let bare_parse_strengthen_injective (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2); assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) let bare_parse_strengthen_correct (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf) /\ parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; bare_parse_strengthen_no_lookahead p1 p2 prf; bare_parse_strengthen_injective p1 p2 prf; parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf); () let parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (parser k (x: t1 { p2 x } )) = bare_parse_strengthen_correct p1 p2 prf; bare_parse_strengthen p1 p2 prf let serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } )
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 32, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: (t1 -> GTot Type0)) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1{p2 input}) : GTot bytes
[]
LowParse.Spec.Combinators.serialize_strengthen'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
p2: (_: t1 -> Prims.GTot Type0) -> prf: LowParse.Spec.Combinators.parse_strengthen_prf p1 p2 -> s: LowParse.Spec.Base.serializer p1 -> input: t1{p2 input} -> Prims.GTot LowParse.Bytes.bytes
{ "end_col": 19, "end_line": 1692, "start_col": 2, "start_line": 1692 }
Prims.Tot
val lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let lift_parser' (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Tot (bare_parser t) = fun (input: bytes) -> parse (f ()) input
val lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t) let lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t) =
false
null
false
fun (input: bytes) -> parse (f ()) input
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.parser_kind", "Prims.unit", "LowParse.Spec.Base.parser", "LowParse.Bytes.bytes", "LowParse.Spec.Base.parse", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.bare_parser" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . 
parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x) val bare_serialize_synth_correct (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Lemma (requires ( (forall (x : t2) . f2 (g1 x) == x) /\ (forall (x x' : t1) . f2 x == f2 x' ==> x == x') )) (ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) let synth_inverse (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 = (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) let synth_inverse_intro (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : Lemma (requires (forall (x : t2) . f2 (g1 x) == x)) (ensures (synth_inverse f2 g1)) = () let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma (ensures (synth_inverse f2 g1)) = Classical.forall_intro prf let synth_inverse_synth_injective_pat (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) [SMTPat (synth_inverse g f)] = assert (forall x1 x2. 
f x1 == f x2 ==> g (f x1) == g (f x2)) let synth_inverse_synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) = () let synth_inverse_synth_injective' (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f)) : Tot (squash (synth_injective f)) = () let synth_injective_synth_inverse_synth_inverse_recip (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f /\ synth_injective g)) : Tot (squash (synth_inverse f g)) = assert (forall x . g (f (g x)) == g x) val serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer (parse_synth p1 f2)) val serialize_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) : Lemma (serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)) let serialize_synth_eq' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) (y1: bytes) (q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x)) (y2: bytes) (q2: squash (y2 == serialize s1 (g1 x))) : Lemma (ensures (y1 == y2)) = serialize_synth_eq p1 f2 s1 g1 u x let serialize_tot_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer #k (tot_parse_synth p1 f2)) = serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _ val serialize_synth_upd_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: 
t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s' )) val serialize_synth_upd_bw_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s' )) (* Strengthened versions of and_then *) inline_for_extraction let synth_tagged_union_data (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (tg: tag_t) (x: refine_with_tag tag_of_data tg) : Tot data_t = x let parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Tot (parser k data_t) = parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) let parse_tagged_union_payload_and_then_cases_injective (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) = 
and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 -> parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1; parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2 ) val parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Tot (parser (and_then_kind kt k) data_t) val parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) let bare_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (k': (t: tag_t) -> Tot parser_kind) (p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (input: bytes) : GTot (option (data_t * consumed_length input)) = match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end val parse_tagged_union_eq_gen (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (#kt': parser_kind) (pt': parser kt' tag_t) (lem_pt: ( (input: bytes) 
-> Lemma (parse pt input == parse pt' input) )) (k': (t: tag_t) -> Tot parser_kind) (p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (lem_p' : ( (k: tag_t) -> (input: bytes) -> Lemma (parse (p k) input == parse (p' k) input) )) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) let tot_parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Pure (tot_parser k data_t) (requires True) (ensures (fun y -> forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x )) = tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) val tot_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) : Pure (tot_parser (and_then_kind kt k) data_t) (requires True) (ensures (fun y -> forall x . 
parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x )) let tot_parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) = parse_tagged_union_eq #kt pt tag_of_data #k p input let bare_serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Tot (bare_serializer data_t) = fun (d: data_t) -> let tg = tag_of_data d in Seq.append (st tg) (serialize (s tg) d) let seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) let seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) let bare_serialize_tagged_union_correct (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serializer_correct (parse_tagged_union pt tag_of_data p) 
(bare_serialize_tagged_union st tag_of_data s))) = (* same proof as nondep_then *) let prf (x: data_t) : Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x))) = parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x); let t = tag_of_data x in let (u: refine_with_tag tag_of_data t) = x in let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in let v1 = parse pt (serialize st t) in assert (Some? v1); parser_kind_prop_equiv kt pt; assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (_, len')) = parse pt (serialize st t) in assert (len' == Seq.length (serialize st t)); assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x)); assert (Seq.slice (serialize st t) 0 len' == st t); seq_slice_append_l (serialize st t) (serialize (s t) u); assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (Some? 
v1'); assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (x1, len1)) = v1 in let (Some (x1', len1')) = v1' in assert (x1 == x1'); assert ((len1 <: nat) == (len1' <: nat)); assert (x1 == t); assert (len1 == Seq.length (serialize st t)); assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u)); seq_slice_append_r (serialize st t) (serialize (s t) u); () in Classical.forall_intro prf val serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Pure (serializer (parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (fun _ -> True)) val serialize_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) (input: data_t) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input)) [SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)] let serialize_tot_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: tot_parser kt tag_t) (st: serializer #kt pt) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer #k (p t))) : Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == 
Some ParserStrong)) (ensures (fun _ -> True)) = serialize_ext _ (serialize_tagged_union st tag_of_data s) _ (* Dependent pairs *) inline_for_extraction let synth_dtuple2 (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: t2 x) : Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) = (| x, y |) let parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) = parse_tagged_union p1 dfst (fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) inline_for_extraction let synth_dtuple2_recip (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) : Tot (t2 x) = dsnd y val serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) : Tot (serializer (parse_dtuple2 p1 p2)) val parse_dtuple2_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None )) let bare_parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (bare_parser (dtuple2 t1 t2)) = fun b -> match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None let parse_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) 
-> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) = parse_dtuple2_eq p1 p2 b val serialize_dtuple2_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Lemma (serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)) let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : GTot bytes = serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy) let serialize_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Tot (squash ( (serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) = serialize_dtuple2_eq s1 s2 xy (* Special case for non-dependent parsing *) val nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Tot (parser (and_then_kind k1 k2) (t1 * t2)) #set-options "--z3rlimit 16" val nondep_then_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (b: bytes) : Lemma (parse (nondep_then p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse p2 b' with | Some (x2, consumed2) -> Some ((x1, x2), consumed1 + consumed2) | 
_ -> None end | _ -> None )) val tot_nondep_then (#k1: parser_kind) (#t1: Type) (p1: tot_parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: tot_parser k2 t2) : Pure (tot_parser (and_then_kind k1 k2) (t1 * t2)) (requires True) (ensures (fun y -> forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x )) let bare_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (s2: serializer p2) : Tot (bare_serializer (t1 * t2)) = fun (x: t1 * t2) -> let (x1, x2) = x in Seq.append (s1 x1) (s2 x2) val serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) : Tot (serializer (nondep_then p1 p2)) val serialize_nondep_then_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input: t1 * t2) : Lemma (serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input) val length_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input1: t1) (input2: t2) : Lemma (Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2)) val serialize_nondep_then_upd_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize 
s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y) )) val serialize_nondep_then_upd_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s' )) val serialize_nondep_then_upd_bw_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + Seq.length (serialize s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y) )) #reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'" val serialize_nondep_then_upd_bw_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_bw_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + i' + Seq.length s' <= 
Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s' )) val serialize_nondep_then_upd_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y) )) val serialize_nondep_then_upd_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ l1 + i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s' )) let serialize_nondep_then_upd_bw_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y) )) = serialize_nondep_then_upd_right s1 s2 x y let 
serialize_nondep_then_upd_bw_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_bw_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s' )) = let s2' = serialize s2 (snd x) in let j' = Seq.length s2' - i' - Seq.length s' in assert (j' + Seq.length s' <= Seq.length s2'); assert (serialize s2 y == seq_upd_seq s2' j' s'); let s = serialize (serialize_nondep_then s1 s2) x in serialize_nondep_then_upd_right_chain s1 s2 x y j' s'; assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s'); () #reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Apply a total transformation on parsed data *) let parse_strengthen_prf (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) : Tot Type = (xbytes: bytes) -> (consumed: consumed_length xbytes) -> (x: t1) -> Lemma (requires (parse p1 xbytes == Some (x, consumed))) (ensures (p2 x)) let bare_parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (bare_parser (x: t1 { p2 x } )) = fun (xbytes: bytes) -> match parse p1 xbytes with | Some (x, consumed) -> prf xbytes consumed x; let (x' : t1 { p2 x' } ) = x in Some (x', consumed) | _ -> None let bare_parse_strengthen_no_lookahead (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (no_lookahead p1 ==> no_lookahead 
(bare_parse_strengthen p1 p2 prf)) = let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) let bare_parse_strengthen_injective (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2); assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) let bare_parse_strengthen_correct (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf) /\ parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; bare_parse_strengthen_no_lookahead p1 p2 prf; bare_parse_strengthen_injective p1 p2 prf; parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf); () let parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (parser k (x: t1 { p2 x } )) = bare_parse_strengthen_correct p1 p2 prf; bare_parse_strengthen p1 p2 prf let serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : GTot bytes = serialize s input let serialize_strengthen_correct (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : Lemma (let output = serialize_strengthen' p2 prf s input in parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output)) = () let serialize_strengthen (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: 
parse_strengthen_prf p1 p2) (s: serializer p1) : Tot (serializer (parse_strengthen p1 p2 prf)) = Classical.forall_intro (serialize_strengthen_correct p2 prf s); serialize_strengthen' p2 prf s let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 = let y1 = f1 x in f2 y1 val make_total_constant_size_parser_compose (sz: nat) (t1 t2: Type) (f1: ((s: bytes {Seq.length s == sz}) -> GTot t1)) (g2: t1 -> GTot t2) : Lemma (requires ( make_total_constant_size_parser_precond sz t1 f1 /\ (forall x x' . g2 x == g2 x' ==> x == x') )) (ensures ( make_total_constant_size_parser_precond sz t1 f1 /\ make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\ (forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\ (forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input) )) (** Tot vs. Ghost *) unfold let lift_parser' (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 32, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val lift_parser' (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (bare_parser t)
[]
LowParse.Spec.Combinators.lift_parser'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f: (_: Prims.unit -> Prims.GTot (LowParse.Spec.Base.parser k t)) -> LowParse.Spec.Base.bare_parser t
{ "end_col": 42, "end_line": 1747, "start_col": 2, "start_line": 1747 }
Prims.GTot
val make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}) : GTot Type0
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
val make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}) : GTot Type0 let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}) : GTot Type0 =
false
null
false
(Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "Prims.nat", "LowParse.Bytes.bytes", "Prims.eq2", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "FStar.Pervasives.Native.option", "Prims.l_and", "Prims.l_or", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } )
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}) : GTot Type0
[]
LowParse.Spec.Combinators.make_constant_size_parser_precond_precond
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
sz: Prims.nat -> t: Type -> f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> Prims.GTot (FStar.Pervasives.Native.option t)) -> s1: LowParse.Bytes.bytes{FStar.Seq.Base.length s1 == sz} -> s2: LowParse.Bytes.bytes{FStar.Seq.Base.length s2 == sz} -> Prims.GTot Type0
{ "end_col": 48, "end_line": 38, "start_col": 2, "start_line": 38 }
Prims.GTot
val make_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : GTot Type0
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
val make_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : GTot Type0 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : GTot Type0 =
false
null
false
forall (s1: bytes{Seq.length s1 == sz}) (s2: bytes{Seq.length s2 == sz}). {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "sometrivial" ]
[ "Prims.nat", "LowParse.Bytes.bytes", "Prims.eq2", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "FStar.Pervasives.Native.option", "Prims.l_Forall", "Prims.l_imp", "LowParse.Spec.Combinators.make_constant_size_parser_precond_precond", "FStar.Seq.Base.equal" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val make_constant_size_parser_precond (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : GTot Type0
[]
LowParse.Spec.Combinators.make_constant_size_parser_precond
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
sz: Prims.nat -> t: Type -> f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> Prims.GTot (FStar.Pervasives.Native.option t)) -> Prims.GTot Type0
{ "end_col": 78, "end_line": 46, "start_col": 2, "start_line": 45 }
Prims.Tot
val parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v
val parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t) let parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t) =
false
null
false
tot_parse_ret v
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Combinators.tot_parse_ret", "LowParse.Spec.Base.parser", "LowParse.Spec.Combinators.parse_ret_kind" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val parse_ret (#t: Type) (v: t) : Tot (parser parse_ret_kind t)
[]
LowParse.Spec.Combinators.parse_ret
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
v: t -> LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_ret_kind t
{ "end_col": 17, "end_line": 202, "start_col": 2, "start_line": 202 }
Prims.Tot
val parse_empty:parser parse_ret_kind unit
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let parse_empty : parser parse_ret_kind unit = parse_ret ()
val parse_empty:parser parse_ret_kind unit let parse_empty:parser parse_ret_kind unit =
false
null
false
parse_ret ()
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Combinators.parse_ret", "Prims.unit" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x)
false
true
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val parse_empty:parser parse_ret_kind unit
[]
LowParse.Spec.Combinators.parse_empty
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_ret_kind Prims.unit
{ "end_col": 14, "end_line": 215, "start_col": 2, "start_line": 215 }
Prims.Tot
val parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t')
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v
val parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t') let parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t') =
false
null
false
[@@ inline_let ]let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Combinators.parse_fret'", "Prims.unit", "LowParse.Spec.Base.parser_kind_prop_equiv", "LowParse.Spec.Combinators.parse_ret_kind", "LowParse.Spec.Base.parser" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val parse_fret (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (parser parse_ret_kind t')
[]
LowParse.Spec.Combinators.parse_fret
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f: (_: t -> Prims.GTot t') -> v: t -> LowParse.Spec.Base.parser LowParse.Spec.Combinators.parse_ret_kind t'
{ "end_col": 17, "end_line": 562, "start_col": 2, "start_line": 561 }
Prims.Tot
val make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : Tot (bare_parser t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end
val make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : Tot (bare_parser t) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : Tot (bare_parser t) =
false
null
false
fun (s: bytes) -> if Seq.length s < sz then None else let s':bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let sz:consumed_length s = sz in Some (v, sz)
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "Prims.nat", "LowParse.Bytes.bytes", "Prims.eq2", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "FStar.Pervasives.Native.option", "Prims.op_LessThan", "FStar.Pervasives.Native.None", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "Prims.bool", "FStar.Pervasives.Native.Some", "FStar.Pervasives.Native.Mktuple2", "FStar.Seq.Base.slice", "LowParse.Spec.Base.bare_parser" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t)))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> GTot (option t))) : Tot (bare_parser t)
[]
LowParse.Spec.Combinators.make_constant_size_parser_aux
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
sz: Prims.nat -> t: Type -> f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> Prims.GTot (FStar.Pervasives.Native.option t)) -> LowParse.Spec.Base.bare_parser t
{ "end_col": 5, "end_line": 29, "start_col": 2, "start_line": 19 }
FStar.Pervasives.Lemma
val and_then_cases_injective_intro (#t #t': Type) (p': (t -> Tot (bare_parser t'))) (lem: (x1: t -> x2: t -> b1: bytes -> b2: bytes -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)))) : Lemma (and_then_cases_injective p')
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
val and_then_cases_injective_intro (#t #t': Type) (p': (t -> Tot (bare_parser t'))) (lem: (x1: t -> x2: t -> b1: bytes -> b2: bytes -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)))) : Lemma (and_then_cases_injective p') let and_then_cases_injective_intro (#t #t': Type) (p': (t -> Tot (bare_parser t'))) (lem: (x1: t -> x2: t -> b1: bytes -> b2: bytes -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)))) : Lemma (and_then_cases_injective p') =
false
null
true
Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1)))
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "lemma" ]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "Prims.unit", "LowParse.Spec.Combinators.and_then_cases_injective_precond", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern", "FStar.Classical.forall_intro_3", "Prims.l_Forall", "Prims.l_imp", "FStar.Classical.forall_intro", "FStar.Classical.move_requires", "Prims.l_True", "LowParse.Spec.Combinators.and_then_cases_injective" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val and_then_cases_injective_intro (#t #t': Type) (p': (t -> Tot (bare_parser t'))) (lem: (x1: t -> x2: t -> b1: bytes -> b2: bytes -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)))) : Lemma (and_then_cases_injective p')
[]
LowParse.Spec.Combinators.and_then_cases_injective_intro
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
p': (_: t -> LowParse.Spec.Base.bare_parser t') -> lem: (x1: t -> x2: t -> b1: LowParse.Bytes.bytes -> b2: LowParse.Bytes.bytes -> FStar.Pervasives.Lemma (requires LowParse.Spec.Combinators.and_then_cases_injective_precond p' x1 x2 b1 b2) (ensures x1 == x2)) -> FStar.Pervasives.Lemma (ensures LowParse.Spec.Combinators.and_then_cases_injective p')
{ "end_col": 108, "end_line": 325, "start_col": 2, "start_line": 325 }
FStar.Pervasives.Lemma
val synth_injective_intro' (#t1 #t2: Type) (f: (t1 -> GTot t2)) (prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x')))) : Lemma (synth_injective f)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
val synth_injective_intro' (#t1 #t2: Type) (f: (t1 -> GTot t2)) (prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x')))) : Lemma (synth_injective f) let synth_injective_intro' (#t1 #t2: Type) (f: (t1 -> GTot t2)) (prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x')))) : Lemma (synth_injective f) =
false
null
true
Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x))
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "lemma" ]
[ "Prims.unit", "Prims.eq2", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern", "FStar.Classical.forall_intro_2", "Prims.l_imp", "FStar.Classical.move_requires", "Prims.l_True", "LowParse.Spec.Combinators.synth_injective" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val synth_injective_intro' (#t1 #t2: Type) (f: (t1 -> GTot t2)) (prf: (x: t1 -> x': t1 -> Lemma (requires (f x == f x')) (ensures (x == x')))) : Lemma (synth_injective f)
[]
LowParse.Spec.Combinators.synth_injective_intro'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f: (_: t1 -> Prims.GTot t2) -> prf: (x: t1 -> x': t1 -> FStar.Pervasives.Lemma (requires f x == f x') (ensures x == x')) -> FStar.Pervasives.Lemma (ensures LowParse.Spec.Combinators.synth_injective f)
{ "end_col": 69, "end_line": 593, "start_col": 2, "start_line": 593 }
Prims.Tot
val parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t')
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
val parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t') let parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t') =
false
null
false
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Bytes.bytes", "FStar.Pervasives.Native.Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "FStar.Pervasives.Native.Mktuple2", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.bare_parser" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val parse_fret' (#t #t': Type) (f: (t -> GTot t')) (v: t) : Tot (bare_parser t')
[]
LowParse.Spec.Combinators.parse_fret'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f: (_: t -> Prims.GTot t') -> v: t -> LowParse.Spec.Base.bare_parser t'
{ "end_col": 56, "end_line": 557, "start_col": 2, "start_line": 557 }
Prims.Tot
val and_then_kind (k1 k2: parser_kind) : Tot parser_kind
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; }
val and_then_kind (k1 k2: parser_kind) : Tot parser_kind let and_then_kind (k1 k2: parser_kind) : Tot parser_kind =
false
null
false
{ parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = if (is_some k1.parser_kind_high) `bool_and` (is_some k2.parser_kind_high) then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None }
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.Mkparser_kind'", "Prims.op_Addition", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low", "LowParse.Spec.Base.bool_and", "LowParse.Spec.Base.is_some", "Prims.nat", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.some_v", "Prims.bool", "FStar.Pervasives.Native.None", "FStar.Pervasives.Native.option", "Prims.op_Equality", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "LowParse.Spec.Base.ParserConsumesAll", "LowParse.Spec.Base.ParserStrong", "LowParse.Spec.Combinators.and_then_metadata", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? (p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind
false
true
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val and_then_kind (k1 k2: parser_kind) : Tot parser_kind
[]
LowParse.Spec.Combinators.and_then_kind
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
k1: LowParse.Spec.Base.parser_kind -> k2: LowParse.Spec.Base.parser_kind -> LowParse.Spec.Base.parser_kind
{ "end_col": 10, "end_line": 467, "start_col": 4, "start_line": 450 }
Prims.Tot
val lift_parser (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (parser k t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let lift_parser (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Tot (parser k t) = lift_parser_correct f; lift_parser' f
val lift_parser (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (parser k t) let lift_parser (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (parser k t) =
false
null
false
lift_parser_correct f; lift_parser' f
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Base.parser_kind", "Prims.unit", "LowParse.Spec.Base.parser", "LowParse.Spec.Combinators.lift_parser'", "LowParse.Spec.Combinators.lift_parser_correct" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . 
parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x) val bare_serialize_synth_correct (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Lemma (requires ( (forall (x : t2) . f2 (g1 x) == x) /\ (forall (x x' : t1) . f2 x == f2 x' ==> x == x') )) (ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) let synth_inverse (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 = (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) let synth_inverse_intro (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : Lemma (requires (forall (x : t2) . f2 (g1 x) == x)) (ensures (synth_inverse f2 g1)) = () let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma (ensures (synth_inverse f2 g1)) = Classical.forall_intro prf let synth_inverse_synth_injective_pat (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) [SMTPat (synth_inverse g f)] = assert (forall x1 x2. 
f x1 == f x2 ==> g (f x1) == g (f x2)) let synth_inverse_synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (g: (t2 -> GTot t1)) : Lemma (requires (synth_inverse g f)) (ensures (synth_injective f)) = () let synth_inverse_synth_injective' (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f)) : Tot (squash (synth_injective f)) = () let synth_injective_synth_inverse_synth_inverse_recip (#t1: Type) (#t2: Type) (g: (t2 -> GTot t1)) (f: (t1 -> GTot t2)) (u: squash (synth_inverse g f /\ synth_injective g)) : Tot (squash (synth_inverse f g)) = assert (forall x . g (f (g x)) == g x) val serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer (parse_synth p1 f2)) val serialize_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) : Lemma (serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x)) let serialize_synth_eq' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x: t2) (y1: bytes) (q1: squash (y1 == serialize (serialize_synth p1 f2 s1 g1 u) x)) (y2: bytes) (q2: squash (y2 == serialize s1 (g1 x))) : Lemma (ensures (y1 == y2)) = serialize_synth_eq p1 f2 s1 g1 u x let serialize_tot_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) : Tot (serializer #k (tot_parse_synth p1 f2)) = serialize_ext #k _ (serialize_synth #k p1 f2 s1 g1 u) _ val serialize_synth_upd_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: 
t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s' )) val serialize_synth_upd_bw_chain (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) (u: unit { synth_inverse f2 g1 /\ synth_injective f2 }) (x1: t1) (x2: t2) (y1: t1) (y2: t2) (i': nat) (s' : bytes) : Lemma (requires ( let s = serialize s1 x1 in i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\ x2 == f2 x1 /\ y2 == f2 y1 )) (ensures ( let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\ serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s' )) (* Strengthened versions of and_then *) inline_for_extraction let synth_tagged_union_data (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (tg: tag_t) (x: refine_with_tag tag_of_data tg) : Tot data_t = x let parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Tot (parser k data_t) = parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) let parse_tagged_union_payload_and_then_cases_injective (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Lemma (and_then_cases_injective (parse_tagged_union_payload tag_of_data p)) = 
and_then_cases_injective_intro (parse_tagged_union_payload tag_of_data p) (fun x1 x2 b1 b2 -> parse_synth_eq #k #(refine_with_tag tag_of_data x1) (p x1) (synth_tagged_union_data tag_of_data x1) b1; parse_synth_eq #k #(refine_with_tag tag_of_data x2) (p x2) (synth_tagged_union_data tag_of_data x2) b2 ) val parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) : Tot (parser (and_then_kind kt k) data_t) val parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) let bare_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (k': (t: tag_t) -> Tot parser_kind) (p: (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (input: bytes) : GTot (option (data_t * consumed_length input)) = match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end val parse_tagged_union_eq_gen (#kt: parser_kind) (#tag_t: Type) (pt: parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (#kt': parser_kind) (pt': parser kt' tag_t) (lem_pt: ( (input: bytes) 
-> Lemma (parse pt input == parse pt' input) )) (k': (t: tag_t) -> Tot parser_kind) (p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t))) (lem_p' : ( (k: tag_t) -> (input: bytes) -> Lemma (parse (p k) input == parse (p' k) input) )) (input: bytes) : Lemma (parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) let tot_parse_tagged_union_payload (#tag_t: Type) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (tg: tag_t) : Pure (tot_parser k data_t) (requires True) (ensures (fun y -> forall x . parse y x == parse (parse_tagged_union_payload tag_of_data #k p tg) x )) = tot_parse_synth #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) val tot_parse_tagged_union (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) : Pure (tot_parser (and_then_kind kt k) data_t) (requires True) (ensures (fun y -> forall x . 
parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x )) let tot_parse_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (pt: tot_parser kt tag_t) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (input: bytes) : Lemma (parse (tot_parse_tagged_union pt tag_of_data p) input == (match parse pt input with | None -> None | Some (tg, consumed_tg) -> let input_tg = Seq.slice input consumed_tg (Seq.length input) in begin match parse (p tg) input_tg with | Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x) | None -> None end )) = parse_tagged_union_eq #kt pt tag_of_data #k p input let bare_serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Tot (bare_serializer data_t) = fun (d: data_t) -> let tg = tag_of_data d in Seq.append (st tg) (serialize (s tg) d) let seq_slice_append_l (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1) == s1) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) s1) let seq_slice_append_r (#t: Type) (s1 s2: Seq.seq t) : Lemma (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2)) == s2) = assert (Seq.equal (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length (Seq.append s1 s2))) s2) let bare_serialize_tagged_union_correct (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serializer_correct (parse_tagged_union pt tag_of_data p) 
(bare_serialize_tagged_union st tag_of_data s))) = (* same proof as nondep_then *) let prf (x: data_t) : Lemma (parse (parse_tagged_union pt tag_of_data p) (bare_serialize_tagged_union st tag_of_data s x) == Some (x, Seq.length (bare_serialize_tagged_union st tag_of_data s x))) = parse_tagged_union_eq pt tag_of_data p (bare_serialize_tagged_union st tag_of_data s x); let t = tag_of_data x in let (u: refine_with_tag tag_of_data t) = x in let v1' = parse pt (bare_serialize_tagged_union st tag_of_data s x) in let v1 = parse pt (serialize st t) in assert (Some? v1); parser_kind_prop_equiv kt pt; assert (no_lookahead_on pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (_, len')) = parse pt (serialize st t) in assert (len' == Seq.length (serialize st t)); assert (len' <= Seq.length (bare_serialize_tagged_union st tag_of_data s x)); assert (Seq.slice (serialize st t) 0 len' == st t); seq_slice_append_l (serialize st t) (serialize (s t) u); assert (no_lookahead_on_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (no_lookahead_on_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (Some? 
v1'); assert (injective_precond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); assert (injective_postcond pt (serialize st t) (bare_serialize_tagged_union st tag_of_data s x)); let (Some (x1, len1)) = v1 in let (Some (x1', len1')) = v1' in assert (x1 == x1'); assert ((len1 <: nat) == (len1' <: nat)); assert (x1 == t); assert (len1 == Seq.length (serialize st t)); assert (bare_serialize_tagged_union st tag_of_data s x == Seq.append (serialize st t) (serialize (s t) u)); seq_slice_append_r (serialize st t) (serialize (s t) u); () in Classical.forall_intro prf val serialize_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) : Pure (serializer (parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (fun _ -> True)) val serialize_tagged_union_eq (#kt: parser_kind) (#tag_t: Type) (#pt: parser kt tag_t) (st: serializer pt) (#data_t: Type) (tag_of_data: (data_t -> GTot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer (p t))) (input: data_t) : Lemma (requires (kt.parser_kind_subkind == Some ParserStrong)) (ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input)) [SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)] let serialize_tot_tagged_union (#kt: parser_kind) (#tag_t: Type) (#pt: tot_parser kt tag_t) (st: serializer #kt pt) (#data_t: Type) (tag_of_data: (data_t -> Tot tag_t)) (#k: parser_kind) (#p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t))) (s: (t: tag_t) -> Tot (serializer #k (p t))) : Pure (serializer #(and_then_kind kt k) (tot_parse_tagged_union pt tag_of_data p)) (requires (kt.parser_kind_subkind == 
Some ParserStrong)) (ensures (fun _ -> True)) = serialize_ext _ (serialize_tagged_union st tag_of_data s) _ (* Dependent pairs *) inline_for_extraction let synth_dtuple2 (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: t2 x) : Tot (refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) = (| x, y |) let parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (parser (and_then_kind k1 k2) (dtuple2 t1 t2)) = parse_tagged_union p1 dfst (fun (x: t1) -> parse_synth (p2 x) (synth_dtuple2 x)) inline_for_extraction let synth_dtuple2_recip (#t1: Type) (#t2: t1 -> Type) (x: t1) (y: refine_with_tag #t1 #(dtuple2 t1 t2) dfst x) : Tot (t2 x) = dsnd y val serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) : Tot (serializer (parse_dtuple2 p1 p2)) val parse_dtuple2_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None )) let bare_parse_dtuple2 (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) -> parser k2 (t2 x)) : Tot (bare_parser (dtuple2 t1 t2)) = fun b -> match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse (p2 x1) b' with | Some (x2, consumed2) -> Some ((| x1, x2 |), consumed1 + consumed2) | _ -> None end | _ -> None let parse_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (p2: (x: t1) 
-> parser k2 (t2 x)) (b: bytes) : Lemma (parse (parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2) b == bare_parse_dtuple2 #k1 #t1 p1 #k2 #t2 p2 b) = parse_dtuple2_eq p1 p2 b val serialize_dtuple2_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Lemma (serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy)) let bare_serialize_dtuple2 (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : GTot bytes = serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy) let serialize_dtuple2_eq' (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong }) (#k2: parser_kind) (#t2: (t1 -> Tot Type)) (#p2: (x: t1) -> parser k2 (t2 x)) (s2: (x: t1) -> serializer (p2 x)) (xy: dtuple2 t1 t2) : Tot (squash ( (serialize #_ #(dtuple2 t1 t2) (serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2) xy == bare_serialize_dtuple2 #k1 #t1 #p1 s1 #k2 #t2 #p2 s2 xy))) = serialize_dtuple2_eq s1 s2 xy (* Special case for non-dependent parsing *) val nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Tot (parser (and_then_kind k1 k2) (t1 * t2)) #set-options "--z3rlimit 16" val nondep_then_eq (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (b: bytes) : Lemma (parse (nondep_then p1 p2) b == (match parse p1 b with | Some (x1, consumed1) -> let b' = Seq.slice b consumed1 (Seq.length b) in begin match parse p2 b' with | Some (x2, consumed2) -> Some ((x1, x2), consumed1 + consumed2) | 
_ -> None end | _ -> None )) val tot_nondep_then (#k1: parser_kind) (#t1: Type) (p1: tot_parser k1 t1) (#k2: parser_kind) (#t2: Type) (p2: tot_parser k2 t2) : Pure (tot_parser (and_then_kind k1 k2) (t1 * t2)) (requires True) (ensures (fun y -> forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x )) let bare_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) (s2: serializer p2) : Tot (bare_serializer (t1 * t2)) = fun (x: t1 * t2) -> let (x1, x2) = x in Seq.append (s1 x1) (s2 x2) val serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) : Tot (serializer (nondep_then p1 p2)) val serialize_nondep_then_eq (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input: t1 * t2) : Lemma (serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input) val length_serialize_nondep_then (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (input1: t1) (input2: t2) : Lemma (Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2)) val serialize_nondep_then_upd_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize 
s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y) )) val serialize_nondep_then_upd_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s' )) val serialize_nondep_then_upd_bw_left (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) : Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + Seq.length (serialize s1 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y) )) #reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'" val serialize_nondep_then_upd_bw_left_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t1) (i' : nat) (s' : bytes) : Lemma (requires ( let s1' = serialize s1 (fst x) in i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_bw_seq s1' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let len2 = Seq.length (serialize s2 (snd x)) in len2 + i' + Seq.length s' <= 
Seq.length s /\ serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s' )) val serialize_nondep_then_upd_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y) )) val serialize_nondep_then_upd_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ l1 + i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s' )) let serialize_nondep_then_upd_bw_right (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) : Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x)))) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in Seq.length (serialize s2 y) <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s 0 (serialize s2 y) )) = serialize_nondep_then_upd_right s1 s2 x y let 
serialize_nondep_then_upd_bw_right_chain (#k1: parser_kind) (#t1: Type) (#p1: parser k1 t1) (s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } ) (#k2: parser_kind) (#t2: Type) (#p2: parser k2 t2) (s2: serializer p2) (x: t1 * t2) (y: t2) (i' : nat) (s' : bytes) : Lemma (requires ( let s2' = serialize s2 (snd x) in i' + Seq.length s' <= Seq.length s2' /\ serialize s2 y == seq_upd_bw_seq s2' i' s' )) (ensures ( let s = serialize (serialize_nondep_then s1 s2) x in let l1 = Seq.length (serialize s1 (fst x)) in Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\ i' + Seq.length s' <= Seq.length s /\ serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_bw_seq s i' s' )) = let s2' = serialize s2 (snd x) in let j' = Seq.length s2' - i' - Seq.length s' in assert (j' + Seq.length s' <= Seq.length s2'); assert (serialize s2 y == seq_upd_seq s2' j' s'); let s = serialize (serialize_nondep_then s1 s2) x in serialize_nondep_then_upd_right_chain s1 s2 x y j' s'; assert (Seq.length (serialize s1 (fst x)) + j' == Seq.length s - i' - Seq.length s'); () #reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Apply a total transformation on parsed data *) let parse_strengthen_prf (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) : Tot Type = (xbytes: bytes) -> (consumed: consumed_length xbytes) -> (x: t1) -> Lemma (requires (parse p1 xbytes == Some (x, consumed))) (ensures (p2 x)) let bare_parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (bare_parser (x: t1 { p2 x } )) = fun (xbytes: bytes) -> match parse p1 xbytes with | Some (x, consumed) -> prf xbytes consumed x; let (x' : t1 { p2 x' } ) = x in Some (x', consumed) | _ -> None let bare_parse_strengthen_no_lookahead (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (no_lookahead p1 ==> no_lookahead 
(bare_parse_strengthen p1 p2 prf)) = let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . no_lookahead_on p1 b1 b2 ==> no_lookahead_on p' b1 b2) let bare_parse_strengthen_injective (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; let p' : bare_parser (x: t1 { p2 x } ) = bare_parse_strengthen p1 p2 prf in assert (forall (b1 b2: bytes) . injective_precond p' b1 b2 ==> injective_precond p1 b1 b2); assert (forall (b1 b2: bytes) . injective_postcond p1 b1 b2 ==> injective_postcond p' b1 b2) let bare_parse_strengthen_correct (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Lemma (injective (bare_parse_strengthen p1 p2 prf) /\ parser_kind_prop k (bare_parse_strengthen p1 p2 prf)) = parser_kind_prop_equiv k p1; bare_parse_strengthen_no_lookahead p1 p2 prf; bare_parse_strengthen_injective p1 p2 prf; parser_kind_prop_equiv k (bare_parse_strengthen p1 p2 prf); () let parse_strengthen (#k: parser_kind) (#t1: Type) (p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) : Tot (parser k (x: t1 { p2 x } )) = bare_parse_strengthen_correct p1 p2 prf; bare_parse_strengthen p1 p2 prf let serialize_strengthen' (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : GTot bytes = serialize s input let serialize_strengthen_correct (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: parse_strengthen_prf p1 p2) (s: serializer p1) (input: t1 { p2 input } ) : Lemma (let output = serialize_strengthen' p2 prf s input in parse (parse_strengthen p1 p2 prf) output == Some (input, Seq.length output)) = () let serialize_strengthen (#k: parser_kind) (#t1: Type) (#p1: parser k t1) (p2: t1 -> GTot Type0) (prf: 
parse_strengthen_prf p1 p2) (s: serializer p1) : Tot (serializer (parse_strengthen p1 p2 prf)) = Classical.forall_intro (serialize_strengthen_correct p2 prf s); serialize_strengthen' p2 prf s let compose (#t1 #t2 #t3: Type) (f1: t1 -> GTot t2) (f2: t2 -> GTot t3) (x: t1) : GTot t3 = let y1 = f1 x in f2 y1 val make_total_constant_size_parser_compose (sz: nat) (t1 t2: Type) (f1: ((s: bytes {Seq.length s == sz}) -> GTot t1)) (g2: t1 -> GTot t2) : Lemma (requires ( make_total_constant_size_parser_precond sz t1 f1 /\ (forall x x' . g2 x == g2 x' ==> x == x') )) (ensures ( make_total_constant_size_parser_precond sz t1 f1 /\ make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\ (forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\ (forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input) )) (** Tot vs. Ghost *) unfold let lift_parser' (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Tot (bare_parser t) = fun (input: bytes) -> parse (f ()) input let lift_parser_correct (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t)) : Lemma (parser_kind_prop k (lift_parser' f)) = parser_kind_prop_ext k (f ()) (lift_parser' f) let lift_parser (#k: parser_kind) (#t: Type) (f: unit -> GTot (parser k t))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 32, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val lift_parser (#k: parser_kind) (#t: Type) (f: (unit -> GTot (parser k t))) : Tot (parser k t)
[]
LowParse.Spec.Combinators.lift_parser
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f: (_: Prims.unit -> Prims.GTot (LowParse.Spec.Base.parser k t)) -> LowParse.Spec.Base.parser k t
{ "end_col": 16, "end_line": 1763, "start_col": 2, "start_line": 1762 }
FStar.Pervasives.Lemma
val and_then_no_lookahead (#k: parser_kind) (#t: Type) (p: parser k t) (#k': parser_kind) (#t': Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires (and_then_cases_injective p')) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else ()
val and_then_no_lookahead (#k: parser_kind) (#t: Type) (p: parser k t) (#k': parser_kind) (#t': Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires (and_then_cases_injective p')) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) let and_then_no_lookahead (#k: parser_kind) (#t: Type) (p: parser k t) (#k': parser_kind) (#t': Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires (and_then_cases_injective p')) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) =
false
null
true
parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x))
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "lemma" ]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "Prims.op_AmpAmp", "Prims.op_Equality", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserStrong", "FStar.Classical.forall_intro_2", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.l_and", "LowParse.Spec.Base.no_lookahead", "LowParse.Spec.Base.injective", "Prims.l_Forall", "LowParse.Spec.Base.no_lookahead_on", "LowParse.Spec.Combinators.and_then_bare", "FStar.Classical.move_requires", "LowParse.Spec.Combinators.and_then_no_lookahead_on", "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern", "Prims.bool", "FStar.Classical.forall_intro", "Prims.l_iff", "LowParse.Spec.Base.parser_kind_prop", "LowParse.Spec.Base.parser_kind_prop'", "LowParse.Spec.Base.parser_kind_prop_equiv", "LowParse.Spec.Combinators.and_then_cases_injective", "Prims.eq2" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' ))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val and_then_no_lookahead (#k: parser_kind) (#t: Type) (p: parser k t) (#k': parser_kind) (#t': Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires (and_then_cases_injective p')) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p')))
[]
LowParse.Spec.Combinators.and_then_no_lookahead
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
p: LowParse.Spec.Base.parser k t -> p': (_: t -> LowParse.Spec.Base.parser k' t') -> FStar.Pervasives.Lemma (requires LowParse.Spec.Combinators.and_then_cases_injective p') (ensures Mkparser_kind'?.parser_kind_subkind k == FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong /\ Mkparser_kind'?.parser_kind_subkind k' == FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong ==> LowParse.Spec.Base.no_lookahead (LowParse.Spec.Combinators.and_then_bare p p'))
{ "end_col": 9, "end_line": 486, "start_col": 2, "start_line": 482 }
Prims.Tot
val tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> Tot (option t))) : Tot (tot_bare_parser t)
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end
val tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> Tot (option t))) : Tot (tot_bare_parser t) let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> Tot (option t))) : Tot (tot_bare_parser t) =
false
null
false
fun (s: bytes) -> if Seq.length s < sz then None else let s':bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let sz:consumed_length s = sz in Some (v, sz)
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "Prims.nat", "LowParse.Bytes.bytes", "Prims.eq2", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "FStar.Pervasives.Native.option", "Prims.op_LessThan", "FStar.Pervasives.Native.None", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "Prims.bool", "FStar.Pervasives.Native.Some", "FStar.Pervasives.Native.Mktuple2", "FStar.Seq.Base.slice", "LowParse.Spec.Base.tot_bare_parser" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t)))
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: (s: bytes{Seq.length s == sz} -> Tot (option t))) : Tot (tot_bare_parser t)
[]
LowParse.Spec.Combinators.tot_make_constant_size_parser_aux
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
sz: Prims.nat -> t: Type -> f: (s: LowParse.Bytes.bytes{FStar.Seq.Base.length s == sz} -> FStar.Pervasives.Native.option t) -> LowParse.Spec.Base.tot_bare_parser t
{ "end_col": 5, "end_line": 123, "start_col": 2, "start_line": 113 }
Prims.Tot
val serialize_empty:serializer parse_empty
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ())
val serialize_empty:serializer parse_empty let serialize_empty:serializer parse_empty =
false
null
false
serialize_ret () (fun _ -> ())
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "total" ]
[ "LowParse.Spec.Combinators.serialize_ret", "Prims.unit" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret ()
false
true
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val serialize_empty:serializer parse_empty
[]
LowParse.Spec.Combinators.serialize_empty
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
LowParse.Spec.Base.serializer LowParse.Spec.Combinators.parse_empty
{ "end_col": 77, "end_line": 217, "start_col": 47, "start_line": 217 }
FStar.Pervasives.Lemma
val synth_inverse_intro' (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2 -> Lemma (f2 (g1 x) == x))) : Lemma (ensures (synth_inverse f2 g1))
[ { "abbrev": true, "full_module": "FStar.Tactics", "short_module": "T" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Spec.Base", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
false
let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma (ensures (synth_inverse f2 g1)) = Classical.forall_intro prf
val synth_inverse_intro' (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2 -> Lemma (f2 (g1 x) == x))) : Lemma (ensures (synth_inverse f2 g1)) let synth_inverse_intro' (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2 -> Lemma (f2 (g1 x) == x))) : Lemma (ensures (synth_inverse f2 g1)) =
false
null
true
Classical.forall_intro prf
{ "checked_file": "LowParse.Spec.Combinators.fsti.checked", "dependencies": [ "prims.fst.checked", "LowParse.Spec.Base.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Tactics.fst.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Combinators.fsti" }
[ "lemma" ]
[ "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern", "FStar.Classical.forall_intro", "LowParse.Spec.Combinators.synth_inverse" ]
[]
module LowParse.Spec.Combinators include LowParse.Spec.Base module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 module T = FStar.Tactics #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" (** Constant-size parsers *) let make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Tot (bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let make_constant_size_parser_precond_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) (s1: bytes { Seq.length s1 == sz } ) (s2: bytes { Seq.length s2 == sz } ) : GTot Type0 = (Some? (f s1) \/ Some? (f s2)) /\ f s1 == f s2 let make_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> Seq.equal s1 s2 let make_constant_size_parser_precond' (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . {:pattern (f s1); (f s2)} make_constant_size_parser_precond_precond sz t f s1 s2 ==> s1 == s2 let make_constant_size_parser_injective (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Lemma (requires ( make_constant_size_parser_precond sz t f )) (ensures ( injective (make_constant_size_parser_aux sz t f) )) = let p : bare_parser t = make_constant_size_parser_aux sz t f in let prf1 (b1 b2: bytes) : Lemma (requires (injective_precond p b1 b2)) (ensures (injective_postcond p b1 b2)) = assert (Some? (parse p b1)); assert (Some? 
(parse p b2)); let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in assert ((len1 <: nat) == (len2 <: nat)); assert ((len1 <: nat) == sz); assert ((len2 <: nat) == sz); assert (make_constant_size_parser_precond_precond sz t f (Seq.slice b1 0 len1) (Seq.slice b2 0 len2)); assert (make_constant_size_parser_precond' sz t f) in Classical.forall_intro_2 (fun (b1: bytes) -> Classical.move_requires (prf1 b1)) let constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz None let make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot (option t))) : Pure ( parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let tot_make_constant_size_parser_aux (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Tot (tot_bare_parser t) = fun (s: bytes) -> if Seq.length s < sz then None else begin let s' : bytes = Seq.slice s 0 sz in match f s' with | None -> None | Some v -> let (sz: consumed_length s) = sz in Some (v, sz) end let tot_make_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot (option t))) : Pure ( tot_parser (constant_size_parser_kind sz) t ) (requires ( make_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser_aux sz t f in make_constant_size_parser_injective sz t f; parser_kind_prop_equiv (constant_size_parser_kind sz) p; p let make_total_constant_size_parser_precond (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : GTot Type0 = forall (s1: bytes {Seq.length s1 == sz}) (s2: bytes {Seq.length s2 == sz}) . 
{:pattern (f s1); (f s2)} f s1 == f s2 ==> Seq.equal s1 s2 let make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> GTot t)) : Pure ( parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : bare_parser t = make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p let tot_make_total_constant_size_parser (sz: nat) (t: Type) (f: ((s: bytes {Seq.length s == sz}) -> Tot t)) : Pure ( tot_parser (total_constant_size_parser_kind sz) t ) (requires ( make_total_constant_size_parser_precond sz t f )) (ensures (fun _ -> True)) = let p : tot_bare_parser t = tot_make_constant_size_parser sz t (fun x -> Some (f x)) in parser_kind_prop_equiv (total_constant_size_parser_kind sz) p; p (** Combinators *) /// monadic return for the parser monad unfold let parse_ret' (#t:Type) (v:t) : Tot (tot_bare_parser t) = fun (b: bytes) -> Some (v, (0 <: consumed_length b)) // unfold inline_for_extraction let parse_ret_kind : parser_kind = strong_parser_kind 0 0 (Some ParserKindMetadataTotal) let tot_parse_ret (#t:Type) (v:t) : Tot (tot_parser parse_ret_kind t) = parser_kind_prop_equiv parse_ret_kind (parse_ret' v); parse_ret' v let parse_ret (#t:Type) (v:t) : Tot (parser parse_ret_kind t) = tot_parse_ret v let serialize_ret (#t: Type) (v: t) (v_unique: (v' : t) -> Lemma (v == v')) : Tot (serializer (parse_ret v)) = mk_serializer (parse_ret v) (fun (x: t) -> Seq.empty) (fun x -> v_unique x) let parse_empty : parser parse_ret_kind unit = parse_ret () let serialize_empty : serializer parse_empty = serialize_ret () (fun _ -> ()) #set-options "--z3rlimit 16" let fail_parser_kind_precond (k: parser_kind) : GTot Type0 = k.parser_kind_metadata <> Some ParserKindMetadataTotal /\ (Some? 
k.parser_kind_high ==> k.parser_kind_low <= Some?.v k.parser_kind_high) let fail_parser' (t: Type) : Tot (tot_bare_parser t) = fun _ -> None let tot_fail_parser (k: parser_kind) (t: Type) : Pure (tot_parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = let p = fail_parser' t in parser_kind_prop_equiv k p; tot_strengthen k p let fail_parser (k: parser_kind) (t: Type) : Pure (parser k t) (requires (fail_parser_kind_precond k)) (ensures (fun _ -> True)) = tot_fail_parser k t let fail_serializer (k: parser_kind {fail_parser_kind_precond k} ) (t: Type) (prf: (x: t) -> Lemma False) : Tot (serializer (fail_parser k t)) = mk_serializer (fail_parser k t) (fun x -> prf x; false_elim ()) (fun x -> prf x) inline_for_extraction let parse_false_kind = strong_parser_kind 0 0 (Some ParserKindMetadataFail) let parse_false : parser parse_false_kind (squash False) = fail_parser _ _ let serialize_false : serializer parse_false = fun input -> false_elim () /// monadic bind for the parser monad let and_then_bare (#t:Type) (#t':Type) (p:bare_parser t) (p': (t -> Tot (bare_parser t'))) : Tot (bare_parser t') = fun (b: bytes) -> match parse p b with | Some (v, l) -> begin let p'v = p' v in let s' : bytes = Seq.slice b l (Seq.length b) in match parse p'v s' with | Some (v', l') -> let res : consumed_length b = l + l' in Some (v', res) | None -> None end | None -> None let and_then_cases_injective_precond (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (x1 x2: t) (b1 b2: bytes) : GTot Type0 = Some? (parse (p' x1) b1) /\ Some? (parse (p' x2) b2) /\ ( let (Some (v1, _)) = parse (p' x1) b1 in let (Some (v2, _)) = parse (p' x2) b2 in v1 == v2 ) let and_then_cases_injective (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) : GTot Type0 = forall (x1 x2: t) (b1 b2: bytes) . 
{:pattern (parse (p' x1) b1); (parse (p' x2) b2)} and_then_cases_injective_precond p' x1 x2 b1 b2 ==> x1 == x2 let and_then_cases_injective_intro (#t:Type) (#t':Type) (p': (t -> Tot (bare_parser t'))) (lem: ( (x1: t) -> (x2: t) -> (b1: bytes) -> (b2: bytes) -> Lemma (requires (and_then_cases_injective_precond p' x1 x2 b1 b2)) (ensures (x1 == x2)) )) : Lemma (and_then_cases_injective p') = Classical.forall_intro_3 (fun x1 x2 b1 -> Classical.forall_intro (Classical.move_requires (lem x1 x2 b1))) let and_then_injective (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) : Lemma (requires ( injective p /\ (forall (x: t) . injective (p' x)) /\ and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') )) = let ps = and_then_bare p p' in let f (b1 b2: bytes) : Lemma (requires (injective_precond ps b1 b2)) (ensures (injective_postcond ps b1 b2)) = let (Some (v1, len1)) = p b1 in let (Some (v2, len2)) = p b2 in let b1' : bytes = Seq.slice b1 len1 (Seq.length b1) in let b2' : bytes = Seq.slice b2 len2 (Seq.length b2) in assert (Some? ((p' v1) b1')); assert (Some? ((p' v2) b2')); assert (and_then_cases_injective_precond p' v1 v2 b1' b2'); assert (v1 == v2); assert (injective_precond p b1 b2); assert ((len1 <: nat) == (len2 <: nat)); assert (injective (p' v1)); assert (injective_precond (p' v1) b1' b2'); assert (injective_postcond (p' v1) b1' b2'); let (Some (_, len1')) = (p' v1) b1' in let (Some (_, len2')) = (p' v2) b2' in assert ((len1' <: nat) == (len2' <: nat)); Seq.lemma_split (Seq.slice b1 0 (len1 + len1')) len1; Seq.lemma_split (Seq.slice b2 0 (len2 + len2')) len1; assert (injective_postcond ps b1 b2) in Classical.forall_intro_2 (fun x -> Classical.move_requires (f x)) let and_then_no_lookahead_on (#t:Type) (#t':Type) (p: bare_parser t) (p': (t -> Tot (bare_parser t'))) (x: bytes) (x' : bytes) : Lemma (requires ( no_lookahead p /\ injective p /\ (forall (x: t) . 
no_lookahead (p' x)) )) (ensures (no_lookahead_on (and_then_bare p p') x x')) = let f = and_then_bare p p' in match f x with | Some v -> let (y, off) = v in let off : nat = off in let (off_x : consumed_length x ) = off in if off <= Seq.length x' then let (off_x' : consumed_length x') = off in let g () : Lemma (requires (Seq.slice x' 0 off_x' == Seq.slice x 0 off_x)) (ensures ( Some? (f x') /\ ( let (Some v') = f x' in let (y', off') = v' in y == y' ))) = assert (Some? (p x)); let (Some (y1, off1)) = p x in assert (off1 <= off); assert (off1 <= Seq.length x'); assert (Seq.slice x' 0 off1 == Seq.slice (Seq.slice x' 0 off_x') 0 off1); assert (Seq.slice x' 0 off1 == Seq.slice x 0 off1); assert (no_lookahead_on p x x'); assert (Some? (p x')); let (Some v1') = p x' in let (y1', off1') = v1' in assert (y1 == y1'); assert (injective_precond p x x'); assert ((off1 <: nat) == (off1' <: nat)); let x2 : bytes = Seq.slice x off1 (Seq.length x) in let x2' : bytes = Seq.slice x' off1 (Seq.length x') in let p2 = p' y1 in assert (Some? (p2 x2)); let (Some (y2, off2)) = p2 x2 in assert (off == off1 + off2); assert (off2 <= Seq.length x2); assert (off2 <= Seq.length x2'); assert (Seq.slice x2' 0 off2 == Seq.slice (Seq.slice x' 0 off_x') off1 (off1 + off2)); assert (Seq.slice x2' 0 off2 == Seq.slice x2 0 off2); assert (no_lookahead_on p2 x2 x2'); assert (Some? 
(p2 x2')); let (Some v2') = p2 x2' in let (y2', _) = v2' in assert (y2 == y2') in Classical.move_requires g () else () | _ -> () inline_for_extraction let and_then_metadata (k1 k2: parser_kind_metadata_t) : Tot parser_kind_metadata_t = match k1, k2 with | Some ParserKindMetadataFail, _ -> k1 | _, Some ParserKindMetadataFail -> k2 | Some ParserKindMetadataTotal, Some ParserKindMetadataTotal -> k1 | _ -> None // unfold inline_for_extraction let and_then_kind (k1 k2: parser_kind) : Tot parser_kind = { parser_kind_low = k1.parser_kind_low + k2.parser_kind_low; parser_kind_high = begin if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then Some (some_v k1.parser_kind_high + some_v k2.parser_kind_high) else None end; parser_kind_metadata = and_then_metadata k1.parser_kind_metadata k2.parser_kind_metadata; parser_kind_subkind = begin if k2.parser_kind_subkind = Some ParserConsumesAll then Some ParserConsumesAll else if (k1.parser_kind_subkind = Some ParserStrong) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then Some ParserStrong else if (k2.parser_kind_high = Some 0) `bool_and` (k2.parser_kind_subkind = Some ParserStrong) then k1.parser_kind_subkind else None end; } let and_then_no_lookahead (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ((k.parser_kind_subkind == Some ParserStrong /\ k'.parser_kind_subkind == Some ParserStrong) ==> no_lookahead (and_then_bare p p'))) = parser_kind_prop_equiv k p; Classical.forall_intro (fun (x: t) -> parser_kind_prop_equiv k' (p' x)); if k.parser_kind_subkind = Some ParserStrong && k.parser_kind_subkind = Some ParserStrong then Classical.forall_intro_2 (fun x -> Classical.move_requires (and_then_no_lookahead_on p p' x)) else () #set-options "--max_fuel 8 --max_ifuel 8 --z3rlimit 64" let and_then_correct (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot 
(parser k' t'))) : Lemma (requires ( and_then_cases_injective p' )) (ensures ( injective (and_then_bare p p') /\ parser_kind_prop (and_then_kind k k') (and_then_bare p p') )) = parser_kind_prop_equiv k p; Classical.forall_intro (fun x -> parser_kind_prop_equiv k' (p' x)); parser_kind_prop_equiv (and_then_kind k k') (and_then_bare p p'); and_then_injective p p'; and_then_no_lookahead p p' #reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" val and_then (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) : Pure (parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun _ -> True)) val and_then_eq (#k: parser_kind) (#t:Type) (p:parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (parser k' t'))) (input: bytes) : Lemma (requires (and_then_cases_injective p')) (ensures (parse (and_then p p') input == and_then_bare p p' input)) val tot_and_then (#k: parser_kind) (#t:Type) (p:tot_parser k t) (#k': parser_kind) (#t':Type) (p': (t -> Tot (tot_parser k' t'))) : Pure (tot_parser (and_then_kind k k') t') (requires ( and_then_cases_injective p' )) (ensures (fun y -> forall x . parse y x == parse (and_then #k p #k' p') x )) /// monadic return for the parser monad unfold let parse_fret' (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (bare_parser t') = fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) unfold let parse_fret (#t #t':Type) (f: t -> GTot t') (v:t) : Tot (parser parse_ret_kind t') = [@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (parse_fret' f v) in parse_fret' f v let synth_injective (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : GTot Type0 = forall (x x' : t1) . {:pattern (f x); (f x')} f x == f x' ==> x == x' let synth_injective_intro (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) : Lemma (requires (forall (x x' : t1) . 
f x == f x' ==> x == x')) (ensures (synth_injective f)) = () let synth_injective_intro' (#t1: Type) (#t2: Type) (f: (t1 -> GTot t2)) (prf: ( (x: t1) -> (x' : t1) -> Lemma (requires (f x == f x')) (ensures (x == x')) )) : Lemma (synth_injective f) = Classical.forall_intro_2 (fun x -> Classical.move_requires (prf x)) let parse_synth' (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Tot (bare_parser t2) = fun b -> match parse p1 b with | None -> None | Some (x1, consumed) -> Some (f2 x1, consumed) val parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) : Pure (parser k t2) (requires ( synth_injective f2 )) (ensures (fun _ -> True)) val parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) let parse_synth_eq2 (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (sq: squash (synth_injective f2)) (b: bytes) : Lemma (ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b)) = parse_synth_eq p1 f2 b val tot_parse_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) : Pure (tot_parser k t2) (requires ( synth_injective f2 )) (ensures (fun y -> forall x . 
parse y x == parse (parse_synth #k p1 f2) x )) let tot_parse_synth_eq (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: tot_parser k t1) (f2: t1 -> Tot t2) (b: bytes) : Lemma (requires (synth_injective f2)) (ensures (parse (tot_parse_synth p1 f2) b == parse_synth' #k p1 f2 b)) = parse_synth_eq #k p1 f2 b let bare_serialize_synth (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Tot (bare_serializer t2) = fun (x: t2) -> s1 (g1 x) val bare_serialize_synth_correct (#k: parser_kind) (#t1: Type) (#t2: Type) (p1: parser k t1) (f2: t1 -> GTot t2) (s1: serializer p1) (g1: t2 -> GTot t1) : Lemma (requires ( (forall (x : t2) . f2 (g1 x) == x) /\ (forall (x x' : t1) . f2 x == f2 x' ==> x == x') )) (ensures (serializer_correct (parse_synth p1 f2) (bare_serialize_synth p1 f2 s1 g1 ))) let synth_inverse (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : GTot Type0 = (forall (x : t2) . {:pattern (f2 (g1 x))} f2 (g1 x) == x) let synth_inverse_intro (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) : Lemma (requires (forall (x : t2) . f2 (g1 x) == x)) (ensures (synth_inverse f2 g1)) = () let synth_inverse_intro' (#t1: Type) (#t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2) -> Lemma (f2 (g1 x) == x)) : Lemma
false
false
LowParse.Spec.Combinators.fsti
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
null
val synth_inverse_intro' (#t1 #t2: Type) (f2: (t1 -> GTot t2)) (g1: (t2 -> GTot t1)) (prf: (x: t2 -> Lemma (f2 (g1 x) == x))) : Lemma (ensures (synth_inverse f2 g1))
[]
LowParse.Spec.Combinators.synth_inverse_intro'
{ "file_name": "src/lowparse/LowParse.Spec.Combinators.fsti", "git_rev": "446a08ce38df905547cf20f28c43776b22b8087a", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
f2: (_: t1 -> Prims.GTot t2) -> g1: (_: t2 -> Prims.GTot t1) -> prf: (x: t2 -> FStar.Pervasives.Lemma (ensures f2 (g1 x) == x)) -> FStar.Pervasives.Lemma (ensures LowParse.Spec.Combinators.synth_inverse f2 g1)
{ "end_col": 28, "end_line": 719, "start_col": 2, "start_line": 719 }